diff --git a/.coveragerc b/.coveragerc
index 130673a5..292fcda2 100644
--- a/.coveragerc
+++ b/.coveragerc
@@ -10,8 +10,3 @@ exclude_lines =
     pragma: NO COVER
     # Ignore debug-only repr
     def __repr__
-    # Ignore pkg_resources exceptions.
-    # This is added at the module level as a safeguard for if someone
-    # generates the code and tries to run it without pip installing. This
-    # makes it virtually impossible to test properly.
-    except pkg_resources.DistributionNotFound
diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml
index 3815c983..fccaa8e8 100644
--- a/.github/.OwlBot.lock.yaml
+++ b/.github/.OwlBot.lock.yaml
@@ -13,4 +13,4 @@
 # limitations under the License.
 docker:
   image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest
-  digest: sha256:7a40313731a7cb1454eef6b33d3446ebb121836738dc3ab3d2d3ded5268c35b6
+  digest: sha256:3bf87e47c2173d7eed42714589dc4da2c07c3268610f1e47f8e1a30decbfc7f1
diff --git a/.github/release-please.yml b/.github/release-please.yml
index 29601ad4..fe749ff6 100644
--- a/.github/release-please.yml
+++ b/.github/release-please.yml
@@ -1,5 +1,6 @@
 releaseType: python
 handleGHRelease: true
+manifest: true
 # NOTE: this section is generated by synthtool.languages.python
 # See https://github.com/googleapis/synthtool/blob/master/synthtool/languages/python.py
 branches:
diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml
index 7092a139..e97d89e4 100644
--- a/.github/workflows/docs.yml
+++ b/.github/workflows/docs.yml
@@ -12,7 +12,7 @@ jobs:
       - name: Setup Python
         uses: actions/setup-python@v4
         with:
-          python-version: "3.10"
+          python-version: "3.9"
       - name: Install nox
         run: |
           python -m pip install --upgrade setuptools pip wheel
@@ -28,7 +28,7 @@ jobs:
       - name: Setup Python
         uses: actions/setup-python@v4
         with:
-          python-version: "3.10"
+          python-version: "3.9"
       - name: Install nox
         run: |
           python -m pip install --upgrade setuptools pip wheel
diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml
index d2aee5b7..16d5a9e9 100644
--- a/.github/workflows/lint.yml
+++ b/.github/workflows/lint.yml
@@ -12,7 +12,7 @@ jobs:
       - name: Setup Python
         uses: actions/setup-python@v4
         with:
-          python-version: "3.10"
+          python-version: "3.8"
       - name: Install nox
         run: |
           python -m pip install --upgrade setuptools pip wheel
diff --git a/.github/workflows/unittest.yml b/.github/workflows/unittest.yml
index 24e46555..c0dca034 100644
--- a/.github/workflows/unittest.yml
+++ b/.github/workflows/unittest.yml
@@ -41,7 +41,7 @@ jobs:
       - name: Setup Python
         uses: actions/setup-python@v4
         with:
-          python-version: "3.10"
+          python-version: "3.8"
       - name: Install coverage
         run: |
           python -m pip install --upgrade setuptools pip wheel
diff --git a/.kokoro/docker/docs/Dockerfile b/.kokoro/docker/docs/Dockerfile
index 238b87b9..f8137d0a 100644
--- a/.kokoro/docker/docs/Dockerfile
+++ b/.kokoro/docker/docs/Dockerfile
@@ -60,16 +60,16 @@ RUN apt-get update \
   && rm -rf /var/lib/apt/lists/* \
   && rm -f /var/cache/apt/archives/*.deb
 
-###################### Install python 3.8.11
+###################### Install python 3.9.13
 
-# Download python 3.8.11
-RUN wget https://www.python.org/ftp/python/3.8.11/Python-3.8.11.tgz
+# Download python 3.9.13
+RUN wget https://www.python.org/ftp/python/3.9.13/Python-3.9.13.tgz
 
 # Extract files
-RUN tar -xvf Python-3.8.11.tgz
+RUN tar -xvf Python-3.9.13.tgz
 
-# Install python 3.8.11
-RUN ./Python-3.8.11/configure --enable-optimizations
+# Install python 3.9.13
+RUN ./Python-3.9.13/configure --enable-optimizations
 RUN make altinstall
 
 ###################### Install
pip diff --git a/.kokoro/requirements.in b/.kokoro/requirements.in index 7718391a..cbd7e77f 100644 --- a/.kokoro/requirements.in +++ b/.kokoro/requirements.in @@ -5,4 +5,6 @@ typing-extensions twine wheel setuptools -nox \ No newline at end of file +nox +charset-normalizer<3 +click<8.1.0 diff --git a/.kokoro/requirements.txt b/.kokoro/requirements.txt index d15994ba..05dc4672 100644 --- a/.kokoro/requirements.txt +++ b/.kokoro/requirements.txt @@ -20,9 +20,9 @@ cachetools==5.2.0 \ --hash=sha256:6a94c6402995a99c3970cc7e4884bb60b4a8639938157eeed436098bf9831757 \ --hash=sha256:f9f17d2aec496a9aa6b76f53e3b614c965223c061982d434d160f930c698a9db # via google-auth -certifi==2022.6.15 \ - --hash=sha256:84c85a9078b11105f04f3036a9482ae10e4621616db313fe045dd24743a0820d \ - --hash=sha256:fe86415d55e84719d75f8b69414f6438ac3547d2078ab91b67e779ef69378412 +certifi==2022.12.7 \ + --hash=sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3 \ + --hash=sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18 # via requests cffi==1.15.1 \ --hash=sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5 \ @@ -93,11 +93,14 @@ cffi==1.15.1 \ charset-normalizer==2.1.1 \ --hash=sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845 \ --hash=sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f - # via requests + # via + # -r requirements.in + # requests click==8.0.4 \ --hash=sha256:6a7a62563bbfabfda3a38f3023a1db4a35978c0abd76f6c9605ecd6554d6d9b1 \ --hash=sha256:8458d7b1287c5fb128c90e23381cf99dcde74beaf6c7ff6384ce84d6fe090adb # via + # -r requirements.in # gcp-docuploader # gcp-releasetool colorlog==6.7.0 \ @@ -110,29 +113,33 @@ commonmark==0.9.1 \ --hash=sha256:452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60 \ --hash=sha256:da2f38c92590f83de410ba1a3cbceafbc74fee9def35f9251ba9a971d6d66fd9 # via rich -cryptography==37.0.4 \ - --hash=sha256:190f82f3e87033821828f60787cfa42bff98404483577b591429ed99bed39d59 \ - --hash=sha256:2be53f9f5505673eeda5f2736bea736c40f051a739bfae2f92d18aed1eb54596 \ - --hash=sha256:30788e070800fec9bbcf9faa71ea6d8068f5136f60029759fd8c3efec3c9dcb3 \ - --hash=sha256:3d41b965b3380f10e4611dbae366f6dc3cefc7c9ac4e8842a806b9672ae9add5 \ - --hash=sha256:4c590ec31550a724ef893c50f9a97a0c14e9c851c85621c5650d699a7b88f7ab \ - --hash=sha256:549153378611c0cca1042f20fd9c5030d37a72f634c9326e225c9f666d472884 \ - --hash=sha256:63f9c17c0e2474ccbebc9302ce2f07b55b3b3fcb211ded18a42d5764f5c10a82 \ - --hash=sha256:6bc95ed67b6741b2607298f9ea4932ff157e570ef456ef7ff0ef4884a134cc4b \ - --hash=sha256:7099a8d55cd49b737ffc99c17de504f2257e3787e02abe6d1a6d136574873441 \ - --hash=sha256:75976c217f10d48a8b5a8de3d70c454c249e4b91851f6838a4e48b8f41eb71aa \ - --hash=sha256:7bc997818309f56c0038a33b8da5c0bfbb3f1f067f315f9abd6fc07ad359398d \ - --hash=sha256:80f49023dd13ba35f7c34072fa17f604d2f19bf0989f292cedf7ab5770b87a0b \ - --hash=sha256:91ce48d35f4e3d3f1d83e29ef4a9267246e6a3be51864a5b7d2247d5086fa99a \ - --hash=sha256:a958c52505c8adf0d3822703078580d2c0456dd1d27fabfb6f76fe63d2971cd6 \ - --hash=sha256:b62439d7cd1222f3da897e9a9fe53bbf5c104fff4d60893ad1355d4c14a24157 \ - --hash=sha256:b7f8dd0d4c1f21759695c05a5ec8536c12f31611541f8904083f3dc582604280 \ - --hash=sha256:d204833f3c8a33bbe11eda63a54b1aad7aa7456ed769a982f21ec599ba5fa282 \ - --hash=sha256:e007f052ed10cc316df59bc90fbb7ff7950d7e2919c9757fd42a2b8ecf8a5f67 \ - --hash=sha256:f2dcb0b3b63afb6df7fd94ec6fbddac81b5492513f7b0436210d390c14d46ee8 \ - 
--hash=sha256:f721d1885ecae9078c3f6bbe8a88bc0786b6e749bf32ccec1ef2b18929a05046 \ - --hash=sha256:f7a6de3e98771e183645181b3627e2563dcde3ce94a9e42a3f427d2255190327 \ - --hash=sha256:f8c0a6e9e1dd3eb0414ba320f85da6b0dcbd543126e30fcc546e7372a7fbf3b9 +cryptography==38.0.3 \ + --hash=sha256:068147f32fa662c81aebab95c74679b401b12b57494872886eb5c1139250ec5d \ + --hash=sha256:06fc3cc7b6f6cca87bd56ec80a580c88f1da5306f505876a71c8cfa7050257dd \ + --hash=sha256:25c1d1f19729fb09d42e06b4bf9895212292cb27bb50229f5aa64d039ab29146 \ + --hash=sha256:402852a0aea73833d982cabb6d0c3bb582c15483d29fb7085ef2c42bfa7e38d7 \ + --hash=sha256:4e269dcd9b102c5a3d72be3c45d8ce20377b8076a43cbed6f660a1afe365e436 \ + --hash=sha256:5419a127426084933076132d317911e3c6eb77568a1ce23c3ac1e12d111e61e0 \ + --hash=sha256:554bec92ee7d1e9d10ded2f7e92a5d70c1f74ba9524947c0ba0c850c7b011828 \ + --hash=sha256:5e89468fbd2fcd733b5899333bc54d0d06c80e04cd23d8c6f3e0542358c6060b \ + --hash=sha256:65535bc550b70bd6271984d9863a37741352b4aad6fb1b3344a54e6950249b55 \ + --hash=sha256:6ab9516b85bebe7aa83f309bacc5f44a61eeb90d0b4ec125d2d003ce41932d36 \ + --hash=sha256:6addc3b6d593cd980989261dc1cce38263c76954d758c3c94de51f1e010c9a50 \ + --hash=sha256:728f2694fa743a996d7784a6194da430f197d5c58e2f4e278612b359f455e4a2 \ + --hash=sha256:785e4056b5a8b28f05a533fab69febf5004458e20dad7e2e13a3120d8ecec75a \ + --hash=sha256:78cf5eefac2b52c10398a42765bfa981ce2372cbc0457e6bf9658f41ec3c41d8 \ + --hash=sha256:7f836217000342d448e1c9a342e9163149e45d5b5eca76a30e84503a5a96cab0 \ + --hash=sha256:8d41a46251bf0634e21fac50ffd643216ccecfaf3701a063257fe0b2be1b6548 \ + --hash=sha256:984fe150f350a3c91e84de405fe49e688aa6092b3525f407a18b9646f6612320 \ + --hash=sha256:9b24bcff7853ed18a63cfb0c2b008936a9554af24af2fb146e16d8e1aed75748 \ + --hash=sha256:b1b35d9d3a65542ed2e9d90115dfd16bbc027b3f07ee3304fc83580f26e43249 \ + --hash=sha256:b1b52c9e5f8aa2b802d48bd693190341fae201ea51c7a167d69fc48b60e8a959 \ + --hash=sha256:bbf203f1a814007ce24bd4d51362991d5cb90ba0c177a9c08825f2cc304d871f \ + --hash=sha256:be243c7e2bfcf6cc4cb350c0d5cdf15ca6383bbcb2a8ef51d3c9411a9d4386f0 \ + --hash=sha256:bfbe6ee19615b07a98b1d2287d6a6073f734735b49ee45b11324d85efc4d5cbd \ + --hash=sha256:c46837ea467ed1efea562bbeb543994c2d1f6e800785bd5a2c98bc096f5cb220 \ + --hash=sha256:dfb4f4dd568de1b6af9f4cda334adf7d72cf5bc052516e1b2608b683375dd95c \ + --hash=sha256:ed7b00096790213e09eb11c97cc6e2b757f15f3d2f85833cd2d3ec3fe37c1722 # via # gcp-releasetool # secretstorage @@ -148,23 +155,23 @@ filelock==3.8.0 \ --hash=sha256:55447caa666f2198c5b6b13a26d2084d26fa5b115c00d065664b2124680c4edc \ --hash=sha256:617eb4e5eedc82fc5f47b6d61e4d11cb837c56cb4544e39081099fa17ad109d4 # via virtualenv -gcp-docuploader==0.6.3 \ - --hash=sha256:ba8c9d76b3bbac54b0311c503a373b00edc2dc02d6d54ea9507045adb8e870f7 \ - --hash=sha256:c0f5aaa82ce1854a386197e4e359b120ad6d4e57ae2c812fce42219a3288026b +gcp-docuploader==0.6.4 \ + --hash=sha256:01486419e24633af78fd0167db74a2763974765ee8078ca6eb6964d0ebd388af \ + --hash=sha256:70861190c123d907b3b067da896265ead2eeb9263969d6955c9e0bb091b5ccbf # via -r requirements.in -gcp-releasetool==1.8.7 \ - --hash=sha256:3d2a67c9db39322194afb3b427e9cb0476ce8f2a04033695f0aeb63979fc2b37 \ - --hash=sha256:5e4d28f66e90780d77f3ecf1e9155852b0c3b13cbccb08ab07e66b2357c8da8d +gcp-releasetool==1.10.0 \ + --hash=sha256:72a38ca91b59c24f7e699e9227c90cbe4dd71b789383cb0164b088abae294c83 \ + --hash=sha256:8c7c99320208383d4bb2b808c6880eb7a81424afe7cdba3c8d84b25f4f0e097d # via -r requirements.in -google-api-core==2.8.2 \ - 
--hash=sha256:06f7244c640322b508b125903bb5701bebabce8832f85aba9335ec00b3d02edc \ - --hash=sha256:93c6a91ccac79079ac6bbf8b74ee75db970cc899278b97d53bc012f35908cf50 +google-api-core==2.10.2 \ + --hash=sha256:10c06f7739fe57781f87523375e8e1a3a4674bf6392cd6131a3222182b971320 \ + --hash=sha256:34f24bd1d5f72a8c4519773d99ca6bf080a6c4e041b4e9f024fe230191dda62e # via # google-cloud-core # google-cloud-storage -google-auth==2.11.0 \ - --hash=sha256:be62acaae38d0049c21ca90f27a23847245c9f161ff54ede13af2cb6afecbac9 \ - --hash=sha256:ed65ecf9f681832298e29328e1ef0a3676e3732b2e56f41532d45f70a22de0fb +google-auth==2.14.1 \ + --hash=sha256:ccaa901f31ad5cbb562615eb8b664b3dd0bf5404a67618e642307f00613eda4d \ + --hash=sha256:f5d8701633bebc12e0deea4df8abd8aff31c28b355360597f7f2ee60f2e4d016 # via # gcp-releasetool # google-api-core @@ -174,76 +181,102 @@ google-cloud-core==2.3.2 \ --hash=sha256:8417acf6466be2fa85123441696c4badda48db314c607cf1e5d543fa8bdc22fe \ --hash=sha256:b9529ee7047fd8d4bf4a2182de619154240df17fbe60ead399078c1ae152af9a # via google-cloud-storage -google-cloud-storage==2.5.0 \ - --hash=sha256:19a26c66c317ce542cea0830b7e787e8dac2588b6bfa4d3fd3b871ba16305ab0 \ - --hash=sha256:382f34b91de2212e3c2e7b40ec079d27ee2e3dbbae99b75b1bcd8c63063ce235 +google-cloud-storage==2.6.0 \ + --hash=sha256:104ca28ae61243b637f2f01455cc8a05e8f15a2a18ced96cb587241cdd3820f5 \ + --hash=sha256:4ad0415ff61abdd8bb2ae81c1f8f7ec7d91a1011613f2db87c614c550f97bfe9 # via gcp-docuploader -google-crc32c==1.3.0 \ - --hash=sha256:04e7c220798a72fd0f08242bc8d7a05986b2a08a0573396187fd32c1dcdd58b3 \ - --hash=sha256:05340b60bf05b574159e9bd940152a47d38af3fb43803ffe71f11d704b7696a6 \ - --hash=sha256:12674a4c3b56b706153a358eaa1018c4137a5a04635b92b4652440d3d7386206 \ - --hash=sha256:127f9cc3ac41b6a859bd9dc4321097b1a4f6aa7fdf71b4f9227b9e3ebffb4422 \ - --hash=sha256:13af315c3a0eec8bb8b8d80b8b128cb3fcd17d7e4edafc39647846345a3f003a \ - --hash=sha256:1926fd8de0acb9d15ee757175ce7242e235482a783cd4ec711cc999fc103c24e \ - --hash=sha256:226f2f9b8e128a6ca6a9af9b9e8384f7b53a801907425c9a292553a3a7218ce0 \ - --hash=sha256:276de6273eb074a35bc598f8efbc00c7869c5cf2e29c90748fccc8c898c244df \ - --hash=sha256:318f73f5484b5671f0c7f5f63741ab020a599504ed81d209b5c7129ee4667407 \ - --hash=sha256:3bbce1be3687bbfebe29abdb7631b83e6b25da3f4e1856a1611eb21854b689ea \ - --hash=sha256:42ae4781333e331a1743445931b08ebdad73e188fd554259e772556fc4937c48 \ - --hash=sha256:58be56ae0529c664cc04a9c76e68bb92b091e0194d6e3c50bea7e0f266f73713 \ - --hash=sha256:5da2c81575cc3ccf05d9830f9e8d3c70954819ca9a63828210498c0774fda1a3 \ - --hash=sha256:6311853aa2bba4064d0c28ca54e7b50c4d48e3de04f6770f6c60ebda1e975267 \ - --hash=sha256:650e2917660e696041ab3dcd7abac160b4121cd9a484c08406f24c5964099829 \ - --hash=sha256:6a4db36f9721fdf391646685ecffa404eb986cbe007a3289499020daf72e88a2 \ - --hash=sha256:779cbf1ce375b96111db98fca913c1f5ec11b1d870e529b1dc7354b2681a8c3a \ - --hash=sha256:7f6fe42536d9dcd3e2ffb9d3053f5d05221ae3bbcefbe472bdf2c71c793e3183 \ - --hash=sha256:891f712ce54e0d631370e1f4997b3f182f3368179198efc30d477c75d1f44942 \ - --hash=sha256:95c68a4b9b7828ba0428f8f7e3109c5d476ca44996ed9a5f8aac6269296e2d59 \ - --hash=sha256:96a8918a78d5d64e07c8ea4ed2bc44354e3f93f46a4866a40e8db934e4c0d74b \ - --hash=sha256:9c3cf890c3c0ecfe1510a452a165431b5831e24160c5fcf2071f0f85ca5a47cd \ - --hash=sha256:9f58099ad7affc0754ae42e6d87443299f15d739b0ce03c76f515153a5cda06c \ - --hash=sha256:a0b9e622c3b2b8d0ce32f77eba617ab0d6768b82836391e4f8f9e2074582bf02 \ - 
--hash=sha256:a7f9cbea4245ee36190f85fe1814e2d7b1e5f2186381b082f5d59f99b7f11328 \ - --hash=sha256:bab4aebd525218bab4ee615786c4581952eadc16b1ff031813a2fd51f0cc7b08 \ - --hash=sha256:c124b8c8779bf2d35d9b721e52d4adb41c9bfbde45e6a3f25f0820caa9aba73f \ - --hash=sha256:c9da0a39b53d2fab3e5467329ed50e951eb91386e9d0d5b12daf593973c3b168 \ - --hash=sha256:ca60076c388728d3b6ac3846842474f4250c91efbfe5afa872d3ffd69dd4b318 \ - --hash=sha256:cb6994fff247987c66a8a4e550ef374671c2b82e3c0d2115e689d21e511a652d \ - --hash=sha256:d1c1d6236feab51200272d79b3d3e0f12cf2cbb12b208c835b175a21efdb0a73 \ - --hash=sha256:dd7760a88a8d3d705ff562aa93f8445ead54f58fd482e4f9e2bafb7e177375d4 \ - --hash=sha256:dda4d8a3bb0b50f540f6ff4b6033f3a74e8bf0bd5320b70fab2c03e512a62812 \ - --hash=sha256:e0f1ff55dde0ebcfbef027edc21f71c205845585fffe30d4ec4979416613e9b3 \ - --hash=sha256:e7a539b9be7b9c00f11ef16b55486141bc2cdb0c54762f84e3c6fc091917436d \ - --hash=sha256:eb0b14523758e37802f27b7f8cd973f5f3d33be7613952c0df904b68c4842f0e \ - --hash=sha256:ed447680ff21c14aaceb6a9f99a5f639f583ccfe4ce1a5e1d48eb41c3d6b3217 \ - --hash=sha256:f52a4ad2568314ee713715b1e2d79ab55fab11e8b304fd1462ff5cccf4264b3e \ - --hash=sha256:fbd60c6aaa07c31d7754edbc2334aef50601b7f1ada67a96eb1eb57c7c72378f \ - --hash=sha256:fc28e0db232c62ca0c3600884933178f0825c99be4474cdd645e378a10588125 \ - --hash=sha256:fe31de3002e7b08eb20823b3735b97c86c5926dd0581c7710a680b418a8709d4 \ - --hash=sha256:fec221a051150eeddfdfcff162e6db92c65ecf46cb0f7bb1bf812a1520ec026b \ - --hash=sha256:ff71073ebf0e42258a42a0b34f2c09ec384977e7f6808999102eedd5b49920e3 +google-crc32c==1.5.0 \ + --hash=sha256:024894d9d3cfbc5943f8f230e23950cd4906b2fe004c72e29b209420a1e6b05a \ + --hash=sha256:02c65b9817512edc6a4ae7c7e987fea799d2e0ee40c53ec573a692bee24de876 \ + --hash=sha256:02ebb8bf46c13e36998aeaad1de9b48f4caf545e91d14041270d9dca767b780c \ + --hash=sha256:07eb3c611ce363c51a933bf6bd7f8e3878a51d124acfc89452a75120bc436289 \ + --hash=sha256:1034d91442ead5a95b5aaef90dbfaca8633b0247d1e41621d1e9f9db88c36298 \ + --hash=sha256:116a7c3c616dd14a3de8c64a965828b197e5f2d121fedd2f8c5585c547e87b02 \ + --hash=sha256:19e0a019d2c4dcc5e598cd4a4bc7b008546b0358bd322537c74ad47a5386884f \ + --hash=sha256:1c7abdac90433b09bad6c43a43af253e688c9cfc1c86d332aed13f9a7c7f65e2 \ + --hash=sha256:1e986b206dae4476f41bcec1faa057851f3889503a70e1bdb2378d406223994a \ + --hash=sha256:272d3892a1e1a2dbc39cc5cde96834c236d5327e2122d3aaa19f6614531bb6eb \ + --hash=sha256:278d2ed7c16cfc075c91378c4f47924c0625f5fc84b2d50d921b18b7975bd210 \ + --hash=sha256:2ad40e31093a4af319dadf503b2467ccdc8f67c72e4bcba97f8c10cb078207b5 \ + --hash=sha256:2e920d506ec85eb4ba50cd4228c2bec05642894d4c73c59b3a2fe20346bd00ee \ + --hash=sha256:3359fc442a743e870f4588fcf5dcbc1bf929df1fad8fb9905cd94e5edb02e84c \ + --hash=sha256:37933ec6e693e51a5b07505bd05de57eee12f3e8c32b07da7e73669398e6630a \ + --hash=sha256:398af5e3ba9cf768787eef45c803ff9614cc3e22a5b2f7d7ae116df8b11e3314 \ + --hash=sha256:3b747a674c20a67343cb61d43fdd9207ce5da6a99f629c6e2541aa0e89215bcd \ + --hash=sha256:461665ff58895f508e2866824a47bdee72497b091c730071f2b7575d5762ab65 \ + --hash=sha256:4c6fdd4fccbec90cc8a01fc00773fcd5fa28db683c116ee3cb35cd5da9ef6c37 \ + --hash=sha256:5829b792bf5822fd0a6f6eb34c5f81dd074f01d570ed7f36aa101d6fc7a0a6e4 \ + --hash=sha256:596d1f98fc70232fcb6590c439f43b350cb762fb5d61ce7b0e9db4539654cc13 \ + --hash=sha256:5ae44e10a8e3407dbe138984f21e536583f2bba1be9491239f942c2464ac0894 \ + --hash=sha256:635f5d4dd18758a1fbd1049a8e8d2fee4ffed124462d837d1a02a0e009c3ab31 \ + 
--hash=sha256:64e52e2b3970bd891309c113b54cf0e4384762c934d5ae56e283f9a0afcd953e \ + --hash=sha256:66741ef4ee08ea0b2cc3c86916ab66b6aef03768525627fd6a1b34968b4e3709 \ + --hash=sha256:67b741654b851abafb7bc625b6d1cdd520a379074e64b6a128e3b688c3c04740 \ + --hash=sha256:6ac08d24c1f16bd2bf5eca8eaf8304812f44af5cfe5062006ec676e7e1d50afc \ + --hash=sha256:6f998db4e71b645350b9ac28a2167e6632c239963ca9da411523bb439c5c514d \ + --hash=sha256:72218785ce41b9cfd2fc1d6a017dc1ff7acfc4c17d01053265c41a2c0cc39b8c \ + --hash=sha256:74dea7751d98034887dbd821b7aae3e1d36eda111d6ca36c206c44478035709c \ + --hash=sha256:759ce4851a4bb15ecabae28f4d2e18983c244eddd767f560165563bf9aefbc8d \ + --hash=sha256:77e2fd3057c9d78e225fa0a2160f96b64a824de17840351b26825b0848022906 \ + --hash=sha256:7c074fece789b5034b9b1404a1f8208fc2d4c6ce9decdd16e8220c5a793e6f61 \ + --hash=sha256:7c42c70cd1d362284289c6273adda4c6af8039a8ae12dc451dcd61cdabb8ab57 \ + --hash=sha256:7f57f14606cd1dd0f0de396e1e53824c371e9544a822648cd76c034d209b559c \ + --hash=sha256:83c681c526a3439b5cf94f7420471705bbf96262f49a6fe546a6db5f687a3d4a \ + --hash=sha256:8485b340a6a9e76c62a7dce3c98e5f102c9219f4cfbf896a00cf48caf078d438 \ + --hash=sha256:84e6e8cd997930fc66d5bb4fde61e2b62ba19d62b7abd7a69920406f9ecca946 \ + --hash=sha256:89284716bc6a5a415d4eaa11b1726d2d60a0cd12aadf5439828353662ede9dd7 \ + --hash=sha256:8b87e1a59c38f275c0e3676fc2ab6d59eccecfd460be267ac360cc31f7bcde96 \ + --hash=sha256:8f24ed114432de109aa9fd317278518a5af2d31ac2ea6b952b2f7782b43da091 \ + --hash=sha256:98cb4d057f285bd80d8778ebc4fde6b4d509ac3f331758fb1528b733215443ae \ + --hash=sha256:998679bf62b7fb599d2878aa3ed06b9ce688b8974893e7223c60db155f26bd8d \ + --hash=sha256:9ba053c5f50430a3fcfd36f75aff9caeba0440b2d076afdb79a318d6ca245f88 \ + --hash=sha256:9c99616c853bb585301df6de07ca2cadad344fd1ada6d62bb30aec05219c45d2 \ + --hash=sha256:a1fd716e7a01f8e717490fbe2e431d2905ab8aa598b9b12f8d10abebb36b04dd \ + --hash=sha256:a2355cba1f4ad8b6988a4ca3feed5bff33f6af2d7f134852cf279c2aebfde541 \ + --hash=sha256:b1f8133c9a275df5613a451e73f36c2aea4fe13c5c8997e22cf355ebd7bd0728 \ + --hash=sha256:b8667b48e7a7ef66afba2c81e1094ef526388d35b873966d8a9a447974ed9178 \ + --hash=sha256:ba1eb1843304b1e5537e1fca632fa894d6f6deca8d6389636ee5b4797affb968 \ + --hash=sha256:be82c3c8cfb15b30f36768797a640e800513793d6ae1724aaaafe5bf86f8f346 \ + --hash=sha256:c02ec1c5856179f171e032a31d6f8bf84e5a75c45c33b2e20a3de353b266ebd8 \ + --hash=sha256:c672d99a345849301784604bfeaeba4db0c7aae50b95be04dd651fd2a7310b93 \ + --hash=sha256:c6c777a480337ac14f38564ac88ae82d4cd238bf293f0a22295b66eb89ffced7 \ + --hash=sha256:cae0274952c079886567f3f4f685bcaf5708f0a23a5f5216fdab71f81a6c0273 \ + --hash=sha256:cd67cf24a553339d5062eff51013780a00d6f97a39ca062781d06b3a73b15462 \ + --hash=sha256:d3515f198eaa2f0ed49f8819d5732d70698c3fa37384146079b3799b97667a94 \ + --hash=sha256:d5280312b9af0976231f9e317c20e4a61cd2f9629b7bfea6a693d1878a264ebd \ + --hash=sha256:de06adc872bcd8c2a4e0dc51250e9e65ef2ca91be023b9d13ebd67c2ba552e1e \ + --hash=sha256:e1674e4307fa3024fc897ca774e9c7562c957af85df55efe2988ed9056dc4e57 \ + --hash=sha256:e2096eddb4e7c7bdae4bd69ad364e55e07b8316653234a56552d9c988bd2d61b \ + --hash=sha256:e560628513ed34759456a416bf86b54b2476c59144a9138165c9a1575801d0d9 \ + --hash=sha256:edfedb64740750e1a3b16152620220f51d58ff1b4abceb339ca92e934775c27a \ + --hash=sha256:f13cae8cc389a440def0c8c52057f37359014ccbc9dc1f0827936bcd367c6100 \ + --hash=sha256:f314013e7dcd5cf45ab1945d92e713eec788166262ae8deb2cfacd53def27325 \ + 
--hash=sha256:f583edb943cf2e09c60441b910d6a20b4d9d626c75a36c8fcac01a6c96c01183 \ + --hash=sha256:fd8536e902db7e365f49e7d9029283403974ccf29b13fc7028b97e2295b33556 \ + --hash=sha256:fe70e325aa68fa4b5edf7d1a4b6f691eb04bbccac0ace68e34820d283b5f80d4 # via google-resumable-media -google-resumable-media==2.3.3 \ - --hash=sha256:27c52620bd364d1c8116eaac4ea2afcbfb81ae9139fb3199652fcac1724bfb6c \ - --hash=sha256:5b52774ea7a829a8cdaa8bd2d4c3d4bc660c91b30857ab2668d0eb830f4ea8c5 +google-resumable-media==2.4.0 \ + --hash=sha256:2aa004c16d295c8f6c33b2b4788ba59d366677c0a25ae7382436cb30f776deaa \ + --hash=sha256:8d5518502f92b9ecc84ac46779bd4f09694ecb3ba38a3e7ca737a86d15cbca1f # via google-cloud-storage -googleapis-common-protos==1.56.4 \ - --hash=sha256:8eb2cbc91b69feaf23e32452a7ae60e791e09967d81d4fcc7fc388182d1bd394 \ - --hash=sha256:c25873c47279387cfdcbdafa36149887901d36202cb645a0e4f29686bf6e4417 +googleapis-common-protos==1.57.0 \ + --hash=sha256:27a849d6205838fb6cc3c1c21cb9800707a661bb21c6ce7fb13e99eb1f8a0c46 \ + --hash=sha256:a9f4a1d7f6d9809657b7f1316a1aa527f6664891531bcfcc13b6696e685f443c # via google-api-core -idna==3.3 \ - --hash=sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff \ - --hash=sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d +idna==3.4 \ + --hash=sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4 \ + --hash=sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2 # via requests -importlib-metadata==4.12.0 \ - --hash=sha256:637245b8bab2b6502fcbc752cc4b7a6f6243bb02b31c5c26156ad103d3d45670 \ - --hash=sha256:7401a975809ea1fdc658c3aa4f78cc2195a0e019c5cbc4c06122884e9ae80c23 +importlib-metadata==5.0.0 \ + --hash=sha256:da31db32b304314d044d3c12c79bd59e307889b287ad12ff387b3500835fc2ab \ + --hash=sha256:ddb0e35065e8938f867ed4928d0ae5bf2a53b7773871bfe6bcc7e4fcdc7dea43 # via # -r requirements.in + # keyring # twine -jaraco-classes==3.2.2 \ - --hash=sha256:6745f113b0b588239ceb49532aa09c3ebb947433ce311ef2f8e3ad64ebb74594 \ - --hash=sha256:e6ef6fd3fcf4579a7a019d87d1e56a883f4e4c35cfe925f86731abc58804e647 +jaraco-classes==3.2.3 \ + --hash=sha256:2353de3288bc6b82120752201c6b1c1a14b058267fa424ed5ce5984e3b922158 \ + --hash=sha256:89559fa5c1d3c34eff6f631ad80bb21f378dbcbb35dd161fd2c6b93f5be2f98a # via keyring jeepney==0.8.0 \ --hash=sha256:5efe48d255973902f6badc3ce55e2aa6c5c3b3bc642059ef3a91247bcfcc5806 \ @@ -255,9 +288,9 @@ jinja2==3.1.2 \ --hash=sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852 \ --hash=sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61 # via gcp-releasetool -keyring==23.9.0 \ - --hash=sha256:4c32a31174faaee48f43a7e2c7e9c3216ec5e95acf22a2bebfb4a1d05056ee44 \ - --hash=sha256:98f060ec95ada2ab910c195a2d4317be6ef87936a766b239c46aa3c7aac4f0db +keyring==23.11.0 \ + --hash=sha256:3dd30011d555f1345dec2c262f0153f2f0ca6bca041fb1dc4588349bb4c0ac1e \ + --hash=sha256:ad192263e2cdd5f12875dedc2da13534359a7e760e77f8d04b50968a821c2361 # via # gcp-releasetool # twine @@ -303,9 +336,9 @@ markupsafe==2.1.1 \ --hash=sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a \ --hash=sha256:fc7b548b17d238737688817ab67deebb30e8073c95749d55538ed473130ec0c7 # via jinja2 -more-itertools==8.14.0 \ - --hash=sha256:1bc4f91ee5b1b31ac7ceacc17c09befe6a40a503907baf9c839c229b5095cfd2 \ - --hash=sha256:c09443cd3d5438b8dafccd867a6bc1cb0894389e90cb53d227456b0b0bccb750 +more-itertools==9.0.0 \ + --hash=sha256:250e83d7e81d0c87ca6bd942e6aeab8cc9daa6096d12c5308f3f92fa5e5c1f41 \ + 
--hash=sha256:5a6257e40878ef0520b1803990e3e22303a41b5714006c32a3fd8304b26ea1ab # via jaraco-classes nox==2022.8.7 \ --hash=sha256:1b894940551dc5c389f9271d197ca5d655d40bdc6ccf93ed6880e4042760a34b \ @@ -321,34 +354,33 @@ pkginfo==1.8.3 \ --hash=sha256:848865108ec99d4901b2f7e84058b6e7660aae8ae10164e015a6dcf5b242a594 \ --hash=sha256:a84da4318dd86f870a9447a8c98340aa06216bfc6f2b7bdc4b8766984ae1867c # via twine -platformdirs==2.5.2 \ - --hash=sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788 \ - --hash=sha256:58c8abb07dcb441e6ee4b11d8df0ac856038f944ab98b7be6b27b2a3c7feef19 +platformdirs==2.5.4 \ + --hash=sha256:1006647646d80f16130f052404c6b901e80ee4ed6bef6792e1f238a8969106f7 \ + --hash=sha256:af0276409f9a02373d540bf8480021a048711d572745aef4b7842dad245eba10 # via virtualenv -protobuf==3.20.2 \ - --hash=sha256:03d76b7bd42ac4a6e109742a4edf81ffe26ffd87c5993126d894fe48a120396a \ - --hash=sha256:09e25909c4297d71d97612f04f41cea8fa8510096864f2835ad2f3b3df5a5559 \ - --hash=sha256:18e34a10ae10d458b027d7638a599c964b030c1739ebd035a1dfc0e22baa3bfe \ - --hash=sha256:291fb4307094bf5ccc29f424b42268640e00d5240bf0d9b86bf3079f7576474d \ - --hash=sha256:2c0b040d0b5d5d207936ca2d02f00f765906622c07d3fa19c23a16a8ca71873f \ - --hash=sha256:384164994727f274cc34b8abd41a9e7e0562801361ee77437099ff6dfedd024b \ - --hash=sha256:3cb608e5a0eb61b8e00fe641d9f0282cd0eedb603be372f91f163cbfbca0ded0 \ - --hash=sha256:5d9402bf27d11e37801d1743eada54372f986a372ec9679673bfcc5c60441151 \ - --hash=sha256:712dca319eee507a1e7df3591e639a2b112a2f4a62d40fe7832a16fd19151750 \ - --hash=sha256:7a5037af4e76c975b88c3becdf53922b5ffa3f2cddf657574a4920a3b33b80f3 \ - --hash=sha256:8228e56a865c27163d5d1d1771d94b98194aa6917bcfb6ce139cbfa8e3c27334 \ - --hash=sha256:84a1544252a933ef07bb0b5ef13afe7c36232a774affa673fc3636f7cee1db6c \ - --hash=sha256:84fe5953b18a383fd4495d375fe16e1e55e0a3afe7b4f7b4d01a3a0649fcda9d \ - --hash=sha256:9c673c8bfdf52f903081816b9e0e612186684f4eb4c17eeb729133022d6032e3 \ - --hash=sha256:9f876a69ca55aed879b43c295a328970306e8e80a263ec91cf6e9189243c613b \ - --hash=sha256:a9e5ae5a8e8985c67e8944c23035a0dff2c26b0f5070b2f55b217a1c33bbe8b1 \ - --hash=sha256:b4fdb29c5a7406e3f7ef176b2a7079baa68b5b854f364c21abe327bbeec01cdb \ - --hash=sha256:c184485e0dfba4dfd451c3bd348c2e685d6523543a0f91b9fd4ae90eb09e8422 \ - --hash=sha256:c9cdf251c582c16fd6a9f5e95836c90828d51b0069ad22f463761d27c6c19019 \ - --hash=sha256:e39cf61bb8582bda88cdfebc0db163b774e7e03364bbf9ce1ead13863e81e359 \ - --hash=sha256:e8fbc522303e09036c752a0afcc5c0603e917222d8bedc02813fd73b4b4ed804 \ - --hash=sha256:f34464ab1207114e73bba0794d1257c150a2b89b7a9faf504e00af7c9fd58978 \ - --hash=sha256:f52dabc96ca99ebd2169dadbe018824ebda08a795c7684a0b7d203a290f3adb0 +protobuf==3.20.3 \ + --hash=sha256:03038ac1cfbc41aa21f6afcbcd357281d7521b4157926f30ebecc8d4ea59dcb7 \ + --hash=sha256:28545383d61f55b57cf4df63eebd9827754fd2dc25f80c5253f9184235db242c \ + --hash=sha256:2e3427429c9cffebf259491be0af70189607f365c2f41c7c3764af6f337105f2 \ + --hash=sha256:398a9e0c3eaceb34ec1aee71894ca3299605fa8e761544934378bbc6c97de23b \ + --hash=sha256:44246bab5dd4b7fbd3c0c80b6f16686808fab0e4aca819ade6e8d294a29c7050 \ + --hash=sha256:447d43819997825d4e71bf5769d869b968ce96848b6479397e29fc24c4a5dfe9 \ + --hash=sha256:67a3598f0a2dcbc58d02dd1928544e7d88f764b47d4a286202913f0b2801c2e7 \ + --hash=sha256:74480f79a023f90dc6e18febbf7b8bac7508420f2006fabd512013c0c238f454 \ + --hash=sha256:819559cafa1a373b7096a482b504ae8a857c89593cf3a25af743ac9ecbd23480 \ + 
--hash=sha256:899dc660cd599d7352d6f10d83c95df430a38b410c1b66b407a6b29265d66469 \ + --hash=sha256:8c0c984a1b8fef4086329ff8dd19ac77576b384079247c770f29cc8ce3afa06c \ + --hash=sha256:9aae4406ea63d825636cc11ffb34ad3379335803216ee3a856787bcf5ccc751e \ + --hash=sha256:a7ca6d488aa8ff7f329d4c545b2dbad8ac31464f1d8b1c87ad1346717731e4db \ + --hash=sha256:b6cc7ba72a8850621bfec987cb72623e703b7fe2b9127a161ce61e61558ad905 \ + --hash=sha256:bf01b5720be110540be4286e791db73f84a2b721072a3711efff6c324cdf074b \ + --hash=sha256:c02ce36ec760252242a33967d51c289fd0e1c0e6e5cc9397e2279177716add86 \ + --hash=sha256:d9e4432ff660d67d775c66ac42a67cf2453c27cb4d738fc22cb53b5d84c135d4 \ + --hash=sha256:daa564862dd0d39c00f8086f88700fdbe8bc717e993a21e90711acfed02f2402 \ + --hash=sha256:de78575669dddf6099a8a0f46a27e82a1783c557ccc38ee620ed8cc96d3be7d7 \ + --hash=sha256:e64857f395505ebf3d2569935506ae0dfc4a15cb80dc25261176c784662cdcc4 \ + --hash=sha256:f4bd856d702e5b0d96a00ec6b307b0f51c1982c2bf9c0052cf9019e9a544ba99 \ + --hash=sha256:f4c42102bc82a51108e449cbb32b19b180022941c727bac0cfd50170341f16ee # via # gcp-docuploader # gcp-releasetool @@ -377,9 +409,9 @@ pygments==2.13.0 \ # via # readme-renderer # rich -pyjwt==2.4.0 \ - --hash=sha256:72d1d253f32dbd4f5c88eaf1fdc62f3a19f676ccbadb9dbc5d07e951b2b26daf \ - --hash=sha256:d42908208c699b3b973cbeb01a969ba6a96c821eefb1c5bfe4c390c01d67abba +pyjwt==2.6.0 \ + --hash=sha256:69285c7e31fc44f68a1feb309e948e0df53259d579295e6cfe2b1792329f05fd \ + --hash=sha256:d83c3d892a77bbb74d3e1a2cfa90afaadb60945205d1095d9221f04466f64c14 # via gcp-releasetool pyparsing==3.0.9 \ --hash=sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb \ @@ -392,9 +424,9 @@ python-dateutil==2.8.2 \ --hash=sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86 \ --hash=sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9 # via gcp-releasetool -readme-renderer==37.0 \ - --hash=sha256:07b7ea234e03e58f77cc222e206e6abb8f4c0435becce5104794ee591f9301c5 \ - --hash=sha256:9fa416704703e509eeb900696751c908ddeb2011319d93700d8f18baff887a69 +readme-renderer==37.3 \ + --hash=sha256:cd653186dfc73055656f090f227f5cb22a046d7f71a841dfa305f55c9a513273 \ + --hash=sha256:f67a16caedfa71eef48a31b39708637a6f4664c4394801a7b0d6432d13907343 # via twine requests==2.28.1 \ --hash=sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983 \ @@ -405,17 +437,17 @@ requests==2.28.1 \ # google-cloud-storage # requests-toolbelt # twine -requests-toolbelt==0.9.1 \ - --hash=sha256:380606e1d10dc85c3bd47bf5a6095f815ec007be7a8b69c878507068df059e6f \ - --hash=sha256:968089d4584ad4ad7c171454f0a5c6dac23971e9472521ea3b6d49d610aa6fc0 +requests-toolbelt==0.10.1 \ + --hash=sha256:18565aa58116d9951ac39baa288d3adb5b3ff975c4f25eee78555d89e8f247f7 \ + --hash=sha256:62e09f7ff5ccbda92772a29f394a49c3ad6cb181d568b1337626b2abb628a63d # via twine rfc3986==2.0.0 \ --hash=sha256:50b1502b60e289cb37883f3dfd34532b8873c7de9f49bb546641ce9cbd256ebd \ --hash=sha256:97aacf9dbd4bfd829baad6e6309fa6573aaf1be3f6fa735c8ab05e46cecb261c # via twine -rich==12.5.1 \ - --hash=sha256:2eb4e6894cde1e017976d2975ac210ef515d7548bc595ba20e195fb9628acdeb \ - --hash=sha256:63a5c5ce3673d3d5fbbf23cd87e11ab84b6b451436f1b7f19ec54b6bc36ed7ca +rich==12.6.0 \ + --hash=sha256:a4eb26484f2c82589bd9a17c73d32a010b1e29d89f1604cd9bf3a2097b81bb5e \ + --hash=sha256:ba3a3775974105c221d31141f2c116f4fd65c5ceb0698657a11e9f295ec93fd0 # via twine rsa==4.9 \ --hash=sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7 \ @@ -437,9 +469,9 @@ 
twine==4.0.1 \ --hash=sha256:42026c18e394eac3e06693ee52010baa5313e4811d5a11050e7d48436cf41b9e \ --hash=sha256:96b1cf12f7ae611a4a40b6ae8e9570215daff0611828f5fe1f37a16255ab24a0 # via -r requirements.in -typing-extensions==4.3.0 \ - --hash=sha256:25642c956049920a5aa49edcdd6ab1e06d7e5d467fc00e0506c44ac86fbfca02 \ - --hash=sha256:e6d2677a32f47fc7eb2795db1dd15c1f34eff616bcaf2cfb5e997f854fa1c4a6 +typing-extensions==4.4.0 \ + --hash=sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa \ + --hash=sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e # via -r requirements.in urllib3==1.26.12 \ --hash=sha256:3fa96cf423e6987997fc326ae8df396db2a8b7c667747d47ddd8ecba91f4a74e \ @@ -447,25 +479,25 @@ urllib3==1.26.12 \ # via # requests # twine -virtualenv==20.16.4 \ - --hash=sha256:014f766e4134d0008dcaa1f95bafa0fb0f575795d07cae50b1bee514185d6782 \ - --hash=sha256:035ed57acce4ac35c82c9d8802202b0e71adac011a511ff650cbcf9635006a22 +virtualenv==20.16.7 \ + --hash=sha256:8691e3ff9387f743e00f6bb20f70121f5e4f596cae754531f2b3b3a1b1ac696e \ + --hash=sha256:efd66b00386fdb7dbe4822d172303f40cd05e50e01740b19ea42425cbe653e29 # via nox webencodings==0.5.1 \ --hash=sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78 \ --hash=sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923 # via bleach -wheel==0.37.1 \ - --hash=sha256:4bdcd7d840138086126cd09254dc6195fb4fc6f01c050a1d7236f2630db1d22a \ - --hash=sha256:e9a504e793efbca1b8e0e9cb979a249cf4a0a7b5b8c9e8b65a5e39d49529c1c4 +wheel==0.38.4 \ + --hash=sha256:965f5259b566725405b05e7cf774052044b1ed30119b5d586b2703aafe8719ac \ + --hash=sha256:b60533f3f5d530e971d6737ca6d58681ee434818fab630c83a734bb10c083ce8 # via -r requirements.in -zipp==3.8.1 \ - --hash=sha256:05b45f1ee8f807d0cc928485ca40a07cb491cf092ff587c0df9cb1fd154848d2 \ - --hash=sha256:47c40d7fe183a6f21403a199b3e4192cca5774656965b0a4988ad2f8feb5f009 +zipp==3.10.0 \ + --hash=sha256:4fcb6f278987a6605757302a6e40e896257570d11c51628968ccb2a47e80c6c1 \ + --hash=sha256:7a7262fd930bd3e36c50b9a64897aec3fafff3dfdeec9623ae22b40e93f99bb8 # via importlib-metadata # The following packages are considered to be unsafe in a requirements file: -setuptools==65.2.0 \ - --hash=sha256:7f4bc85450898a09f76ebf28b72fa25bc7111f6c7d665d514a60bba9c75ef2a9 \ - --hash=sha256:a3ca5857c89f82f5c9410e8508cb32f4872a3bafd4aa7ae122a24ca33bccc750 +setuptools==65.5.1 \ + --hash=sha256:d0b9a8433464d5800cbe05094acf5c6d52a91bfac9b52bcfc4d41382be5d5d31 \ + --hash=sha256:e197a19aa8ec9722928f2206f8de752def0e4c9fc6953527360d1c36d94ddb2f # via -r requirements.in diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 46d23716..5405cc8f 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -25,7 +25,7 @@ repos: rev: 22.3.0 hooks: - id: black -- repo: https://gitlab.com/pycqa/flake8 +- repo: https://github.com/pycqa/flake8 rev: 3.9.2 hooks: - id: flake8 diff --git a/.release-please-manifest.json b/.release-please-manifest.json new file mode 100644 index 00000000..33b5ff91 --- /dev/null +++ b/.release-please-manifest.json @@ -0,0 +1,4 @@ +{ + ".": "2.12.0" +} + \ No newline at end of file diff --git a/CHANGELOG.md b/CHANGELOG.md index f5ef46c8..be7a63b6 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,29 @@ [1]: https://pypi.org/project/google-cloud-monitoring/#history +## [2.12.0](https://github.com/googleapis/python-monitoring/compare/v2.11.3...v2.12.0) (2022-12-15) + + +### Features + +* Add typing to proto.Message based class attributes 
([eaaca48](https://github.com/googleapis/python-monitoring/commit/eaaca4815872d78725893b0aa26ffd96d84d58d5)) + + +### Bug Fixes + +* Add dict typing for client_options ([eaaca48](https://github.com/googleapis/python-monitoring/commit/eaaca4815872d78725893b0aa26ffd96d84d58d5)) +* Add metric label example to the snippet ([#509](https://github.com/googleapis/python-monitoring/issues/509)) ([48b4e35](https://github.com/googleapis/python-monitoring/commit/48b4e35dee6066035b91214ccb44022f539cb007)) +* Add missing argument description ([#504](https://github.com/googleapis/python-monitoring/issues/504)) ([8d54a7e](https://github.com/googleapis/python-monitoring/commit/8d54a7e337b094e42ab544078f160c15ebc55921)) +* **deps:** Require google-api-core >=1.34.0, >=2.11.0 ([eaaca48](https://github.com/googleapis/python-monitoring/commit/eaaca4815872d78725893b0aa26ffd96d84d58d5)) +* Drop usage of pkg_resources ([eaaca48](https://github.com/googleapis/python-monitoring/commit/eaaca4815872d78725893b0aa26ffd96d84d58d5)) +* Fix timeout default values ([eaaca48](https://github.com/googleapis/python-monitoring/commit/eaaca4815872d78725893b0aa26ffd96d84d58d5)) +* Remove duplicate variable declaration ([#503](https://github.com/googleapis/python-monitoring/issues/503)) ([99a981c](https://github.com/googleapis/python-monitoring/commit/99a981c9b4a53597020a30503e028ecc554b4d68)) + + +### Documentation + +* **samples:** Snippetgen should call await on the operation coroutine before calling result ([eaaca48](https://github.com/googleapis/python-monitoring/commit/eaaca4815872d78725893b0aa26ffd96d84d58d5)) + ## [2.11.3](https://github.com/googleapis/python-monitoring/compare/v2.11.2...v2.11.3) (2022-10-07) diff --git a/docs/index.rst b/docs/index.rst index 27fc9024..cbc85f70 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -2,7 +2,8 @@ .. include:: multiprocessing.rst -Api Reference + +API Reference ------------- .. toctree:: :maxdepth: 2 @@ -11,20 +12,22 @@ Api Reference monitoring_v3/services monitoring_v3/types + Migration Guide --------------- -See the guide below for instructions on migrating to the 2.x release of this library. +See the guide below for instructions on migrating to the latest version. .. toctree:: :maxdepth: 2 - UPGRADING +  UPGRADING + Changelog --------- -For a list of all previous ``google-cloud-monitoring`` releases. +For a list of all ``google-cloud-monitoring`` releases: .. toctree:: :maxdepth: 2 diff --git a/docs/monitoring_v3/types.rst b/docs/monitoring_v3/types.rst index ed0eeeef..ae49a858 100644 --- a/docs/monitoring_v3/types.rst +++ b/docs/monitoring_v3/types.rst @@ -3,5 +3,4 @@ Types for Google Cloud Monitoring v3 API .. automodule:: google.cloud.monitoring_v3.types :members: - :undoc-members: :show-inheritance: diff --git a/google/cloud/monitoring/__init__.py b/google/cloud/monitoring/__init__.py index c978631f..342e79c4 100644 --- a/google/cloud/monitoring/__init__.py +++ b/google/cloud/monitoring/__init__.py @@ -13,6 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +from google.cloud.monitoring import gapic_version as package_version + +__version__ = package_version.__version__ + from google.cloud.monitoring_v3.services.alert_policy_service.client import ( AlertPolicyServiceClient, diff --git a/samples/snippets/v3/cloud-client/quickstart_test.py b/google/cloud/monitoring/gapic_version.py similarity index 57% rename from samples/snippets/v3/cloud-client/quickstart_test.py rename to google/cloud/monitoring/gapic_version.py index d7826e92..16ae0e95 100644 --- a/samples/snippets/v3/cloud-client/quickstart_test.py +++ b/google/cloud/monitoring/gapic_version.py @@ -1,4 +1,5 @@ -# Copyright 2017 Google Inc. +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -11,22 +12,5 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. - -import os - -import backoff - -import quickstart - - -PROJECT = os.environ["GOOGLE_CLOUD_PROJECT"] - - -def test_quickstart(capsys): - @backoff.on_exception(backoff.expo, AssertionError, max_time=60) - def eventually_consistent_test(): - quickstart.run_quickstart(PROJECT) - out, _ = capsys.readouterr() - assert "wrote" in out - - eventually_consistent_test() +# +__version__ = "2.12.0" # {x-release-please-version} diff --git a/google/cloud/monitoring_v3/__init__.py b/google/cloud/monitoring_v3/__init__.py index 4f2e9cb2..f7e8c19f 100644 --- a/google/cloud/monitoring_v3/__init__.py +++ b/google/cloud/monitoring_v3/__init__.py @@ -13,6 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from google.cloud.monitoring import gapic_version as package_version + +__version__ = package_version.__version__ + from .services.alert_policy_service import AlertPolicyServiceClient from .services.alert_policy_service import AlertPolicyServiceAsyncClient diff --git a/google/cloud/monitoring_v3/gapic_version.py b/google/cloud/monitoring_v3/gapic_version.py new file mode 100644 index 00000000..16ae0e95 --- /dev/null +++ b/google/cloud/monitoring_v3/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +__version__ = "2.12.0" # {x-release-please-version} diff --git a/google/cloud/monitoring_v3/services/alert_policy_service/async_client.py b/google/cloud/monitoring_v3/services/alert_policy_service/async_client.py index e92d8562..1d3f1206 100644 --- a/google/cloud/monitoring_v3/services/alert_policy_service/async_client.py +++ b/google/cloud/monitoring_v3/services/alert_policy_service/async_client.py @@ -16,8 +16,19 @@ from collections import OrderedDict import functools import re -from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union -import pkg_resources +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.cloud.monitoring_v3 import gapic_version as package_version from google.api_core.client_options import ClientOptions from google.api_core import exceptions as core_exceptions @@ -181,9 +192,9 @@ def transport(self) -> AlertPolicyServiceTransport: def __init__( self, *, - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, transport: Union[str, AlertPolicyServiceTransport] = "grpc_asyncio", - client_options: ClientOptions = None, + client_options: Optional[ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiates the alert policy service client. @@ -227,11 +238,11 @@ def __init__( async def list_alert_policies( self, - request: Union[alert_service.ListAlertPoliciesRequest, dict] = None, + request: Optional[Union[alert_service.ListAlertPoliciesRequest, dict]] = None, *, - name: str = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListAlertPoliciesAsyncPager: r"""Lists the existing alerting policies for the @@ -265,7 +276,7 @@ async def sample_list_alert_policies(): print(response) Args: - request (Union[google.cloud.monitoring_v3.types.ListAlertPoliciesRequest, dict]): + request (Optional[Union[google.cloud.monitoring_v3.types.ListAlertPoliciesRequest, dict]]): The request object. The protocol for the `ListAlertPolicies` request. name (:class:`str`): @@ -362,11 +373,11 @@ async def sample_list_alert_policies(): async def get_alert_policy( self, - request: Union[alert_service.GetAlertPolicyRequest, dict] = None, + request: Optional[Union[alert_service.GetAlertPolicyRequest, dict]] = None, *, - name: str = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> alert.AlertPolicy: r"""Gets a single alerting policy. @@ -398,7 +409,7 @@ async def sample_get_alert_policy(): print(response) Args: - request (Union[google.cloud.monitoring_v3.types.GetAlertPolicyRequest, dict]): + request (Optional[Union[google.cloud.monitoring_v3.types.GetAlertPolicyRequest, dict]]): The request object. The protocol for the `GetAlertPolicy` request. 
name (:class:`str`): @@ -480,12 +491,12 @@ async def sample_get_alert_policy(): async def create_alert_policy( self, - request: Union[alert_service.CreateAlertPolicyRequest, dict] = None, + request: Optional[Union[alert_service.CreateAlertPolicyRequest, dict]] = None, *, - name: str = None, - alert_policy: alert.AlertPolicy = None, + name: Optional[str] = None, + alert_policy: Optional[alert.AlertPolicy] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> alert.AlertPolicy: r"""Creates a new alerting policy. @@ -517,7 +528,7 @@ async def sample_create_alert_policy(): print(response) Args: - request (Union[google.cloud.monitoring_v3.types.CreateAlertPolicyRequest, dict]): + request (Optional[Union[google.cloud.monitoring_v3.types.CreateAlertPolicyRequest, dict]]): The request object. The protocol for the `CreateAlertPolicy` request. name (:class:`str`): @@ -612,11 +623,11 @@ async def sample_create_alert_policy(): async def delete_alert_policy( self, - request: Union[alert_service.DeleteAlertPolicyRequest, dict] = None, + request: Optional[Union[alert_service.DeleteAlertPolicyRequest, dict]] = None, *, - name: str = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Deletes an alerting policy. @@ -645,7 +656,7 @@ async def sample_delete_alert_policy(): await client.delete_alert_policy(request=request) Args: - request (Union[google.cloud.monitoring_v3.types.DeleteAlertPolicyRequest, dict]): + request (Optional[Union[google.cloud.monitoring_v3.types.DeleteAlertPolicyRequest, dict]]): The request object. The protocol for the `DeleteAlertPolicy` request. name (:class:`str`): @@ -717,12 +728,12 @@ async def sample_delete_alert_policy(): async def update_alert_policy( self, - request: Union[alert_service.UpdateAlertPolicyRequest, dict] = None, + request: Optional[Union[alert_service.UpdateAlertPolicyRequest, dict]] = None, *, - update_mask: field_mask_pb2.FieldMask = None, - alert_policy: alert.AlertPolicy = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + alert_policy: Optional[alert.AlertPolicy] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> alert.AlertPolicy: r"""Updates an alerting policy. You can either replace the entire @@ -756,7 +767,7 @@ async def sample_update_alert_policy(): print(response) Args: - request (Union[google.cloud.monitoring_v3.types.UpdateAlertPolicyRequest, dict]): + request (Optional[Union[google.cloud.monitoring_v3.types.UpdateAlertPolicyRequest, dict]]): The request object. The protocol for the `UpdateAlertPolicy` request. 
update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): @@ -869,14 +880,9 @@ async def __aexit__(self, exc_type, exc, tb): await self.transport.close() -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-monitoring", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) __all__ = ("AlertPolicyServiceAsyncClient",) diff --git a/google/cloud/monitoring_v3/services/alert_policy_service/client.py b/google/cloud/monitoring_v3/services/alert_policy_service/client.py index 39d7ead9..d736a914 100644 --- a/google/cloud/monitoring_v3/services/alert_policy_service/client.py +++ b/google/cloud/monitoring_v3/services/alert_policy_service/client.py @@ -16,8 +16,20 @@ from collections import OrderedDict import os import re -from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union -import pkg_resources +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +from google.cloud.monitoring_v3 import gapic_version as package_version from google.api_core import client_options as client_options_lib from google.api_core import exceptions as core_exceptions @@ -62,7 +74,7 @@ class AlertPolicyServiceClientMeta(type): def get_transport_class( cls, - label: str = None, + label: Optional[str] = None, ) -> Type[AlertPolicyServiceTransport]: """Returns an appropriate transport class. @@ -365,8 +377,8 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, AlertPolicyServiceTransport, None] = None, - client_options: Optional[client_options_lib.ClientOptions] = None, + transport: Optional[Union[str, AlertPolicyServiceTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiates the alert policy service client. @@ -380,7 +392,7 @@ def __init__( transport (Union[str, AlertPolicyServiceTransport]): The transport to use. If set to None, a transport is chosen automatically. - client_options (google.api_core.client_options.ClientOptions): Custom options for the + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the client. It won't take effect if a ``transport`` instance is provided. (1) The ``api_endpoint`` property can be used to override the default endpoint provided by the client. 
GOOGLE_API_USE_MTLS_ENDPOINT @@ -410,6 +422,7 @@ def __init__( client_options = client_options_lib.from_dict(client_options) if client_options is None: client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( client_options @@ -462,11 +475,11 @@ def __init__( def list_alert_policies( self, - request: Union[alert_service.ListAlertPoliciesRequest, dict] = None, + request: Optional[Union[alert_service.ListAlertPoliciesRequest, dict]] = None, *, - name: str = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListAlertPoliciesPager: r"""Lists the existing alerting policies for the @@ -588,11 +601,11 @@ def sample_list_alert_policies(): def get_alert_policy( self, - request: Union[alert_service.GetAlertPolicyRequest, dict] = None, + request: Optional[Union[alert_service.GetAlertPolicyRequest, dict]] = None, *, - name: str = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> alert.AlertPolicy: r"""Gets a single alerting policy. @@ -697,12 +710,12 @@ def sample_get_alert_policy(): def create_alert_policy( self, - request: Union[alert_service.CreateAlertPolicyRequest, dict] = None, + request: Optional[Union[alert_service.CreateAlertPolicyRequest, dict]] = None, *, - name: str = None, - alert_policy: alert.AlertPolicy = None, + name: Optional[str] = None, + alert_policy: Optional[alert.AlertPolicy] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> alert.AlertPolicy: r"""Creates a new alerting policy. @@ -829,11 +842,11 @@ def sample_create_alert_policy(): def delete_alert_policy( self, - request: Union[alert_service.DeleteAlertPolicyRequest, dict] = None, + request: Optional[Union[alert_service.DeleteAlertPolicyRequest, dict]] = None, *, - name: str = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Deletes an alerting policy. @@ -925,12 +938,12 @@ def sample_delete_alert_policy(): def update_alert_policy( self, - request: Union[alert_service.UpdateAlertPolicyRequest, dict] = None, + request: Optional[Union[alert_service.UpdateAlertPolicyRequest, dict]] = None, *, - update_mask: field_mask_pb2.FieldMask = None, - alert_policy: alert.AlertPolicy = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + alert_policy: Optional[alert.AlertPolicy] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> alert.AlertPolicy: r"""Updates an alerting policy. 
You can either replace the entire @@ -1084,14 +1097,9 @@ def __exit__(self, type, value, traceback): self.transport.close() -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-monitoring", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) __all__ = ("AlertPolicyServiceClient",) diff --git a/google/cloud/monitoring_v3/services/alert_policy_service/transports/base.py b/google/cloud/monitoring_v3/services/alert_policy_service/transports/base.py index 9808d581..a7059c7f 100644 --- a/google/cloud/monitoring_v3/services/alert_policy_service/transports/base.py +++ b/google/cloud/monitoring_v3/services/alert_policy_service/transports/base.py @@ -15,7 +15,8 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import pkg_resources + +from google.cloud.monitoring_v3 import gapic_version as package_version import google.auth # type: ignore import google.api_core @@ -29,14 +30,9 @@ from google.cloud.monitoring_v3.types import alert_service from google.protobuf import empty_pb2 # type: ignore -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-monitoring", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) class AlertPolicyServiceTransport(abc.ABC): @@ -54,7 +50,7 @@ def __init__( self, *, host: str = DEFAULT_HOST, - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, diff --git a/google/cloud/monitoring_v3/services/alert_policy_service/transports/grpc.py b/google/cloud/monitoring_v3/services/alert_policy_service/transports/grpc.py index 2e0b3ec1..38df436c 100644 --- a/google/cloud/monitoring_v3/services/alert_policy_service/transports/grpc.py +++ b/google/cloud/monitoring_v3/services/alert_policy_service/transports/grpc.py @@ -57,14 +57,14 @@ def __init__( self, *, host: str = "monitoring.googleapis.com", - credentials: ga_credentials.Credentials = None, - credentials_file: str = None, - scopes: Sequence[str] = None, - channel: grpc.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - ssl_channel_credentials: grpc.ChannelCredentials = None, - client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, @@ -191,8 +191,8 @@ def __init__( def create_channel( cls, host: str = "monitoring.googleapis.com", - credentials: ga_credentials.Credentials 
= None, - credentials_file: str = None, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, **kwargs, diff --git a/google/cloud/monitoring_v3/services/alert_policy_service/transports/grpc_asyncio.py b/google/cloud/monitoring_v3/services/alert_policy_service/transports/grpc_asyncio.py index 547b645b..72b80b6e 100644 --- a/google/cloud/monitoring_v3/services/alert_policy_service/transports/grpc_asyncio.py +++ b/google/cloud/monitoring_v3/services/alert_policy_service/transports/grpc_asyncio.py @@ -59,7 +59,7 @@ class AlertPolicyServiceGrpcAsyncIOTransport(AlertPolicyServiceTransport): def create_channel( cls, host: str = "monitoring.googleapis.com", - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -102,15 +102,15 @@ def __init__( self, *, host: str = "monitoring.googleapis.com", - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, - channel: aio.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - ssl_channel_credentials: grpc.ChannelCredentials = None, - client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, - quota_project_id=None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, api_audience: Optional[str] = None, diff --git a/google/cloud/monitoring_v3/services/group_service/async_client.py b/google/cloud/monitoring_v3/services/group_service/async_client.py index 07b299d6..b01c1471 100644 --- a/google/cloud/monitoring_v3/services/group_service/async_client.py +++ b/google/cloud/monitoring_v3/services/group_service/async_client.py @@ -16,8 +16,19 @@ from collections import OrderedDict import functools import re -from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union -import pkg_resources +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.cloud.monitoring_v3 import gapic_version as package_version from google.api_core.client_options import ClientOptions from google.api_core import exceptions as core_exceptions @@ -169,9 +180,9 @@ def transport(self) -> GroupServiceTransport: def __init__( self, *, - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, transport: Union[str, GroupServiceTransport] = "grpc_asyncio", - client_options: ClientOptions = None, + client_options: Optional[ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiates the group service client. 
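A pattern worth noting in the client and transport hunks above: every keyword argument that defaults to None is now annotated Optional[...] explicitly (newer type checkers no longer accept the implicit-Optional form such as `credentials: ga_credentials.Credentials = None`), and `timeout` moves from `float = None` to `Union[float, object] = gapic_v1.method.DEFAULT`, so that an omitted timeout can be told apart from an explicit `timeout=None`. A minimal, self-contained sketch of the same idea follows; the `call_api` helper and the 60.0 fallback are hypothetical and only illustrate the sentinel-default pattern, they are not part of the generated clients:

    from typing import Optional, Union

    # Module-level sentinel playing the role of gapic_v1.method.DEFAULT:
    # it means "the caller did not pass a timeout at all".
    _DEFAULT = object()

    def call_api(
        name: Optional[str] = None,                # explicit Optional instead of `name: str = None`
        timeout: Union[float, object] = _DEFAULT,  # sentinel default instead of `timeout: float = None`
    ) -> str:
        # Fall back to a library default only when the argument was omitted;
        # an explicit `timeout=None` still means "no timeout".
        effective_timeout = 60.0 if timeout is _DEFAULT else timeout
        return f"name={name!r}, timeout={effective_timeout!r}"

    print(call_api())                # falls back to the 60.0 default
    print(call_api(timeout=None))    # caller explicitly disabled the timeout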
@@ -215,11 +226,11 @@ def __init__( async def list_groups( self, - request: Union[group_service.ListGroupsRequest, dict] = None, + request: Optional[Union[group_service.ListGroupsRequest, dict]] = None, *, - name: str = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListGroupsAsyncPager: r"""Lists the existing groups. @@ -253,7 +264,7 @@ async def sample_list_groups(): print(response) Args: - request (Union[google.cloud.monitoring_v3.types.ListGroupsRequest, dict]): + request (Optional[Union[google.cloud.monitoring_v3.types.ListGroupsRequest, dict]]): The request object. The `ListGroup` request. name (:class:`str`): Required. The @@ -343,11 +354,11 @@ async def sample_list_groups(): async def get_group( self, - request: Union[group_service.GetGroupRequest, dict] = None, + request: Optional[Union[group_service.GetGroupRequest, dict]] = None, *, - name: str = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> group.Group: r"""Gets a single group. @@ -379,7 +390,7 @@ async def sample_get_group(): print(response) Args: - request (Union[google.cloud.monitoring_v3.types.GetGroupRequest, dict]): + request (Optional[Union[google.cloud.monitoring_v3.types.GetGroupRequest, dict]]): The request object. The `GetGroup` request. name (:class:`str`): Required. The group to retrieve. The format is: @@ -487,12 +498,12 @@ async def sample_get_group(): async def create_group( self, - request: Union[group_service.CreateGroupRequest, dict] = None, + request: Optional[Union[group_service.CreateGroupRequest, dict]] = None, *, - name: str = None, - group: gm_group.Group = None, + name: Optional[str] = None, + group: Optional[gm_group.Group] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> gm_group.Group: r"""Creates a new group. @@ -524,7 +535,7 @@ async def sample_create_group(): print(response) Args: - request (Union[google.cloud.monitoring_v3.types.CreateGroupRequest, dict]): + request (Optional[Union[google.cloud.monitoring_v3.types.CreateGroupRequest, dict]]): The request object. The `CreateGroup` request. name (:class:`str`): Required. The @@ -634,11 +645,11 @@ async def sample_create_group(): async def update_group( self, - request: Union[group_service.UpdateGroupRequest, dict] = None, + request: Optional[Union[group_service.UpdateGroupRequest, dict]] = None, *, - group: gm_group.Group = None, + group: Optional[gm_group.Group] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> gm_group.Group: r"""Updates an existing group. You can change any group attributes @@ -670,7 +681,7 @@ async def sample_update_group(): print(response) Args: - request (Union[google.cloud.monitoring_v3.types.UpdateGroupRequest, dict]): + request (Optional[Union[google.cloud.monitoring_v3.types.UpdateGroupRequest, dict]]): The request object. The `UpdateGroup` request. group (:class:`google.cloud.monitoring_v3.types.Group`): Required. The new definition of the group. 
All fields of @@ -778,11 +789,11 @@ async def sample_update_group(): async def delete_group( self, - request: Union[group_service.DeleteGroupRequest, dict] = None, + request: Optional[Union[group_service.DeleteGroupRequest, dict]] = None, *, - name: str = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Deletes an existing group. @@ -811,7 +822,7 @@ async def sample_delete_group(): await client.delete_group(request=request) Args: - request (Union[google.cloud.monitoring_v3.types.DeleteGroupRequest, dict]): + request (Optional[Union[google.cloud.monitoring_v3.types.DeleteGroupRequest, dict]]): The request object. The `DeleteGroup` request. The default behavior is to be able to delete a single group without any descendants. @@ -881,11 +892,11 @@ async def sample_delete_group(): async def list_group_members( self, - request: Union[group_service.ListGroupMembersRequest, dict] = None, + request: Optional[Union[group_service.ListGroupMembersRequest, dict]] = None, *, - name: str = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListGroupMembersAsyncPager: r"""Lists the monitored resources that are members of a @@ -919,7 +930,7 @@ async def sample_list_group_members(): print(response) Args: - request (Union[google.cloud.monitoring_v3.types.ListGroupMembersRequest, dict]): + request (Optional[Union[google.cloud.monitoring_v3.types.ListGroupMembersRequest, dict]]): The request object. The `ListGroupMembers` request. name (:class:`str`): Required. The group whose members are listed. The format @@ -1013,14 +1024,9 @@ async def __aexit__(self, exc_type, exc, tb): await self.transport.close() -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-monitoring", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) __all__ = ("GroupServiceAsyncClient",) diff --git a/google/cloud/monitoring_v3/services/group_service/client.py b/google/cloud/monitoring_v3/services/group_service/client.py index bf6ef6f4..4019983d 100644 --- a/google/cloud/monitoring_v3/services/group_service/client.py +++ b/google/cloud/monitoring_v3/services/group_service/client.py @@ -16,8 +16,20 @@ from collections import OrderedDict import os import re -from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union -import pkg_resources +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +from google.cloud.monitoring_v3 import gapic_version as package_version from google.api_core import client_options as client_options_lib from google.api_core import exceptions as core_exceptions @@ -58,7 +70,7 @@ class GroupServiceClientMeta(type): def get_transport_class( cls, - label: str = None, + label: Optional[str] = None, ) -> Type[GroupServiceTransport]: """Returns an appropriate transport class. 
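Every service module in this change carries the same DEFAULT_CLIENT_INFO replacement shown in the hunk above: the import-time pkg_resources.get_distribution() lookup, together with its DistributionNotFound fallback, is dropped in favor of reading a gapic_version module that ships inside the package. A short sketch of the resulting pattern, assuming the package is importable; the user-agent fragment in the comment is illustrative:

    # The client version now comes from a static module generated with the package,
    # so no installed-distribution metadata is consulted at import time.
    from google.api_core import gapic_v1
    from google.cloud.monitoring_v3 import gapic_version as package_version

    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
        gapic_version=package_version.__version__
    )

    # The version string is reported in the request user-agent, e.g. "... gapic/<version> ...".
    print(DEFAULT_CLIENT_INFO.to_user_agent())
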
@@ -340,8 +352,8 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, GroupServiceTransport, None] = None, - client_options: Optional[client_options_lib.ClientOptions] = None, + transport: Optional[Union[str, GroupServiceTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiates the group service client. @@ -355,7 +367,7 @@ def __init__( transport (Union[str, GroupServiceTransport]): The transport to use. If set to None, a transport is chosen automatically. - client_options (google.api_core.client_options.ClientOptions): Custom options for the + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the client. It won't take effect if a ``transport`` instance is provided. (1) The ``api_endpoint`` property can be used to override the default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT @@ -385,6 +397,7 @@ def __init__( client_options = client_options_lib.from_dict(client_options) if client_options is None: client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( client_options @@ -437,11 +450,11 @@ def __init__( def list_groups( self, - request: Union[group_service.ListGroupsRequest, dict] = None, + request: Optional[Union[group_service.ListGroupsRequest, dict]] = None, *, - name: str = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListGroupsPager: r"""Lists the existing groups. @@ -556,11 +569,11 @@ def sample_list_groups(): def get_group( self, - request: Union[group_service.GetGroupRequest, dict] = None, + request: Optional[Union[group_service.GetGroupRequest, dict]] = None, *, - name: str = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> group.Group: r"""Gets a single group. @@ -691,12 +704,12 @@ def sample_get_group(): def create_group( self, - request: Union[group_service.CreateGroupRequest, dict] = None, + request: Optional[Union[group_service.CreateGroupRequest, dict]] = None, *, - name: str = None, - group: gm_group.Group = None, + name: Optional[str] = None, + group: Optional[gm_group.Group] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> gm_group.Group: r"""Creates a new group. @@ -838,11 +851,11 @@ def sample_create_group(): def update_group( self, - request: Union[group_service.UpdateGroupRequest, dict] = None, + request: Optional[Union[group_service.UpdateGroupRequest, dict]] = None, *, - group: gm_group.Group = None, + group: Optional[gm_group.Group] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> gm_group.Group: r"""Updates an existing group. 
You can change any group attributes @@ -973,11 +986,11 @@ def sample_update_group(): def delete_group( self, - request: Union[group_service.DeleteGroupRequest, dict] = None, + request: Optional[Union[group_service.DeleteGroupRequest, dict]] = None, *, - name: str = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Deletes an existing group. @@ -1067,11 +1080,11 @@ def sample_delete_group(): def list_group_members( self, - request: Union[group_service.ListGroupMembersRequest, dict] = None, + request: Optional[Union[group_service.ListGroupMembersRequest, dict]] = None, *, - name: str = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListGroupMembersPager: r"""Lists the monitored resources that are members of a @@ -1197,14 +1210,9 @@ def __exit__(self, type, value, traceback): self.transport.close() -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-monitoring", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) __all__ = ("GroupServiceClient",) diff --git a/google/cloud/monitoring_v3/services/group_service/transports/base.py b/google/cloud/monitoring_v3/services/group_service/transports/base.py index a55b13f6..fee17266 100644 --- a/google/cloud/monitoring_v3/services/group_service/transports/base.py +++ b/google/cloud/monitoring_v3/services/group_service/transports/base.py @@ -15,7 +15,8 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import pkg_resources + +from google.cloud.monitoring_v3 import gapic_version as package_version import google.auth # type: ignore import google.api_core @@ -30,14 +31,9 @@ from google.cloud.monitoring_v3.types import group_service from google.protobuf import empty_pb2 # type: ignore -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-monitoring", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) class GroupServiceTransport(abc.ABC): @@ -55,7 +51,7 @@ def __init__( self, *, host: str = DEFAULT_HOST, - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, diff --git a/google/cloud/monitoring_v3/services/group_service/transports/grpc.py b/google/cloud/monitoring_v3/services/group_service/transports/grpc.py index 90ef90a7..bae90bc0 100644 --- a/google/cloud/monitoring_v3/services/group_service/transports/grpc.py +++ b/google/cloud/monitoring_v3/services/group_service/transports/grpc.py @@ -61,14 +61,14 @@ def __init__( self, *, host: str = "monitoring.googleapis.com", - credentials: ga_credentials.Credentials = None, - credentials_file: str = None, - scopes: Sequence[str] = None, - channel: grpc.Channel = None, - 
api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - ssl_channel_credentials: grpc.ChannelCredentials = None, - client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, @@ -195,8 +195,8 @@ def __init__( def create_channel( cls, host: str = "monitoring.googleapis.com", - credentials: ga_credentials.Credentials = None, - credentials_file: str = None, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, **kwargs, diff --git a/google/cloud/monitoring_v3/services/group_service/transports/grpc_asyncio.py b/google/cloud/monitoring_v3/services/group_service/transports/grpc_asyncio.py index 42294ced..5e73cf98 100644 --- a/google/cloud/monitoring_v3/services/group_service/transports/grpc_asyncio.py +++ b/google/cloud/monitoring_v3/services/group_service/transports/grpc_asyncio.py @@ -63,7 +63,7 @@ class GroupServiceGrpcAsyncIOTransport(GroupServiceTransport): def create_channel( cls, host: str = "monitoring.googleapis.com", - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -106,15 +106,15 @@ def __init__( self, *, host: str = "monitoring.googleapis.com", - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, - channel: aio.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - ssl_channel_credentials: grpc.ChannelCredentials = None, - client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, - quota_project_id=None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, api_audience: Optional[str] = None, diff --git a/google/cloud/monitoring_v3/services/metric_service/async_client.py b/google/cloud/monitoring_v3/services/metric_service/async_client.py index 1e9dc7f1..ef58bc5c 100644 --- a/google/cloud/monitoring_v3/services/metric_service/async_client.py +++ b/google/cloud/monitoring_v3/services/metric_service/async_client.py @@ -16,8 +16,19 @@ from collections import OrderedDict import functools import re -from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union -import pkg_resources +from typing import ( + Dict, 
+ Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.cloud.monitoring_v3 import gapic_version as package_version from google.api_core.client_options import ClientOptions from google.api_core import exceptions as core_exceptions @@ -174,9 +185,9 @@ def transport(self) -> MetricServiceTransport: def __init__( self, *, - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, transport: Union[str, MetricServiceTransport] = "grpc_asyncio", - client_options: ClientOptions = None, + client_options: Optional[ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiates the metric service client. @@ -220,13 +231,13 @@ def __init__( async def list_monitored_resource_descriptors( self, - request: Union[ - metric_service.ListMonitoredResourceDescriptorsRequest, dict + request: Optional[ + Union[metric_service.ListMonitoredResourceDescriptorsRequest, dict] ] = None, *, - name: str = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListMonitoredResourceDescriptorsAsyncPager: r"""Lists monitored resource descriptors that match a @@ -260,7 +271,7 @@ async def sample_list_monitored_resource_descriptors(): print(response) Args: - request (Union[google.cloud.monitoring_v3.types.ListMonitoredResourceDescriptorsRequest, dict]): + request (Optional[Union[google.cloud.monitoring_v3.types.ListMonitoredResourceDescriptorsRequest, dict]]): The request object. The `ListMonitoredResourceDescriptors` request. name (:class:`str`): @@ -351,13 +362,13 @@ async def sample_list_monitored_resource_descriptors(): async def get_monitored_resource_descriptor( self, - request: Union[ - metric_service.GetMonitoredResourceDescriptorRequest, dict + request: Optional[ + Union[metric_service.GetMonitoredResourceDescriptorRequest, dict] ] = None, *, - name: str = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> monitored_resource_pb2.MonitoredResourceDescriptor: r"""Gets a single monitored resource descriptor. This @@ -390,7 +401,7 @@ async def sample_get_monitored_resource_descriptor(): print(response) Args: - request (Union[google.cloud.monitoring_v3.types.GetMonitoredResourceDescriptorRequest, dict]): + request (Optional[Union[google.cloud.monitoring_v3.types.GetMonitoredResourceDescriptorRequest, dict]]): The request object. The `GetMonitoredResourceDescriptor` request. name (:class:`str`): @@ -481,11 +492,13 @@ async def sample_get_monitored_resource_descriptor(): async def list_metric_descriptors( self, - request: Union[metric_service.ListMetricDescriptorsRequest, dict] = None, + request: Optional[ + Union[metric_service.ListMetricDescriptorsRequest, dict] + ] = None, *, - name: str = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListMetricDescriptorsAsyncPager: r"""Lists metric descriptors that match a filter. 
This @@ -519,7 +532,7 @@ async def sample_list_metric_descriptors(): print(response) Args: - request (Union[google.cloud.monitoring_v3.types.ListMetricDescriptorsRequest, dict]): + request (Optional[Union[google.cloud.monitoring_v3.types.ListMetricDescriptorsRequest, dict]]): The request object. The `ListMetricDescriptors` request. name (:class:`str`): Required. The @@ -609,11 +622,13 @@ async def sample_list_metric_descriptors(): async def get_metric_descriptor( self, - request: Union[metric_service.GetMetricDescriptorRequest, dict] = None, + request: Optional[ + Union[metric_service.GetMetricDescriptorRequest, dict] + ] = None, *, - name: str = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> metric_pb2.MetricDescriptor: r"""Gets a single metric descriptor. This method does not @@ -646,7 +661,7 @@ async def sample_get_metric_descriptor(): print(response) Args: - request (Union[google.cloud.monitoring_v3.types.GetMetricDescriptorRequest, dict]): + request (Optional[Union[google.cloud.monitoring_v3.types.GetMetricDescriptorRequest, dict]]): The request object. The `GetMetricDescriptor` request. name (:class:`str`): Required. The metric descriptor on which to execute the @@ -730,12 +745,14 @@ async def sample_get_metric_descriptor(): async def create_metric_descriptor( self, - request: Union[metric_service.CreateMetricDescriptorRequest, dict] = None, + request: Optional[ + Union[metric_service.CreateMetricDescriptorRequest, dict] + ] = None, *, - name: str = None, - metric_descriptor: metric_pb2.MetricDescriptor = None, + name: Optional[str] = None, + metric_descriptor: Optional[metric_pb2.MetricDescriptor] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> metric_pb2.MetricDescriptor: r"""Creates a new metric descriptor. The creation is executed @@ -771,7 +788,7 @@ async def sample_create_metric_descriptor(): print(response) Args: - request (Union[google.cloud.monitoring_v3.types.CreateMetricDescriptorRequest, dict]): + request (Optional[Union[google.cloud.monitoring_v3.types.CreateMetricDescriptorRequest, dict]]): The request object. The `CreateMetricDescriptor` request. name (:class:`str`): @@ -852,11 +869,13 @@ async def sample_create_metric_descriptor(): async def delete_metric_descriptor( self, - request: Union[metric_service.DeleteMetricDescriptorRequest, dict] = None, + request: Optional[ + Union[metric_service.DeleteMetricDescriptorRequest, dict] + ] = None, *, - name: str = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Deletes a metric descriptor. Only user-created `custom @@ -887,7 +906,7 @@ async def sample_delete_metric_descriptor(): await client.delete_metric_descriptor(request=request) Args: - request (Union[google.cloud.monitoring_v3.types.DeleteMetricDescriptorRequest, dict]): + request (Optional[Union[google.cloud.monitoring_v3.types.DeleteMetricDescriptorRequest, dict]]): The request object. The `DeleteMetricDescriptor` request. 
name (:class:`str`): @@ -960,14 +979,14 @@ async def sample_delete_metric_descriptor(): async def list_time_series( self, - request: Union[metric_service.ListTimeSeriesRequest, dict] = None, + request: Optional[Union[metric_service.ListTimeSeriesRequest, dict]] = None, *, - name: str = None, - filter: str = None, - interval: common.TimeInterval = None, - view: metric_service.ListTimeSeriesRequest.TimeSeriesView = None, + name: Optional[str] = None, + filter: Optional[str] = None, + interval: Optional[common.TimeInterval] = None, + view: Optional[metric_service.ListTimeSeriesRequest.TimeSeriesView] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListTimeSeriesAsyncPager: r"""Lists time series that match a filter. This method @@ -1003,7 +1022,7 @@ async def sample_list_time_series(): print(response) Args: - request (Union[google.cloud.monitoring_v3.types.ListTimeSeriesRequest, dict]): + request (Optional[Union[google.cloud.monitoring_v3.types.ListTimeSeriesRequest, dict]]): The request object. The `ListTimeSeries` request. name (:class:`str`): Required. The @@ -1135,12 +1154,12 @@ async def sample_list_time_series(): async def create_time_series( self, - request: Union[metric_service.CreateTimeSeriesRequest, dict] = None, + request: Optional[Union[metric_service.CreateTimeSeriesRequest, dict]] = None, *, - name: str = None, - time_series: Sequence[gm_metric.TimeSeries] = None, + name: Optional[str] = None, + time_series: Optional[MutableSequence[gm_metric.TimeSeries]] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Creates or adds data to one or more time series. @@ -1173,7 +1192,7 @@ async def sample_create_time_series(): await client.create_time_series(request=request) Args: - request (Union[google.cloud.monitoring_v3.types.CreateTimeSeriesRequest, dict]): + request (Optional[Union[google.cloud.monitoring_v3.types.CreateTimeSeriesRequest, dict]]): The request object. The `CreateTimeSeries` request. name (:class:`str`): Required. The @@ -1187,7 +1206,7 @@ async def sample_create_time_series(): This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - time_series (:class:`Sequence[google.cloud.monitoring_v3.types.TimeSeries]`): + time_series (:class:`MutableSequence[google.cloud.monitoring_v3.types.TimeSeries]`): Required. The new data to be added to a list of time series. Adds at most one data point to each of several time series. The new data point must be more recent than @@ -1251,12 +1270,12 @@ async def sample_create_time_series(): async def create_service_time_series( self, - request: Union[metric_service.CreateTimeSeriesRequest, dict] = None, + request: Optional[Union[metric_service.CreateTimeSeriesRequest, dict]] = None, *, - name: str = None, - time_series: Sequence[gm_metric.TimeSeries] = None, + name: Optional[str] = None, + time_series: Optional[MutableSequence[gm_metric.TimeSeries]] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Creates or adds data to one or more service time series. 
A @@ -1293,7 +1312,7 @@ async def sample_create_service_time_series(): await client.create_service_time_series(request=request) Args: - request (Union[google.cloud.monitoring_v3.types.CreateTimeSeriesRequest, dict]): + request (Optional[Union[google.cloud.monitoring_v3.types.CreateTimeSeriesRequest, dict]]): The request object. The `CreateTimeSeries` request. name (:class:`str`): Required. The @@ -1307,7 +1326,7 @@ async def sample_create_service_time_series(): This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - time_series (:class:`Sequence[google.cloud.monitoring_v3.types.TimeSeries]`): + time_series (:class:`MutableSequence[google.cloud.monitoring_v3.types.TimeSeries]`): Required. The new data to be added to a list of time series. Adds at most one data point to each of several time series. The new data point must be more recent than @@ -1376,14 +1395,9 @@ async def __aexit__(self, exc_type, exc, tb): await self.transport.close() -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-monitoring", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) __all__ = ("MetricServiceAsyncClient",) diff --git a/google/cloud/monitoring_v3/services/metric_service/client.py b/google/cloud/monitoring_v3/services/metric_service/client.py index f1b516d9..b21a1448 100644 --- a/google/cloud/monitoring_v3/services/metric_service/client.py +++ b/google/cloud/monitoring_v3/services/metric_service/client.py @@ -16,8 +16,20 @@ from collections import OrderedDict import os import re -from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union -import pkg_resources +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +from google.cloud.monitoring_v3 import gapic_version as package_version from google.api_core import client_options as client_options_lib from google.api_core import exceptions as core_exceptions @@ -61,7 +73,7 @@ class MetricServiceClientMeta(type): def get_transport_class( cls, - label: str = None, + label: Optional[str] = None, ) -> Type[MetricServiceTransport]: """Returns an appropriate transport class. @@ -366,8 +378,8 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, MetricServiceTransport, None] = None, - client_options: Optional[client_options_lib.ClientOptions] = None, + transport: Optional[Union[str, MetricServiceTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiates the metric service client. @@ -381,7 +393,7 @@ def __init__( transport (Union[str, MetricServiceTransport]): The transport to use. If set to None, a transport is chosen automatically. - client_options (google.api_core.client_options.ClientOptions): Custom options for the + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the client. It won't take effect if a ``transport`` instance is provided. (1) The ``api_endpoint`` property can be used to override the default endpoint provided by the client. 
GOOGLE_API_USE_MTLS_ENDPOINT @@ -411,6 +423,7 @@ def __init__( client_options = client_options_lib.from_dict(client_options) if client_options is None: client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( client_options @@ -463,13 +476,13 @@ def __init__( def list_monitored_resource_descriptors( self, - request: Union[ - metric_service.ListMonitoredResourceDescriptorsRequest, dict + request: Optional[ + Union[metric_service.ListMonitoredResourceDescriptorsRequest, dict] ] = None, *, - name: str = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListMonitoredResourceDescriptorsPager: r"""Lists monitored resource descriptors that match a @@ -589,13 +602,13 @@ def sample_list_monitored_resource_descriptors(): def get_monitored_resource_descriptor( self, - request: Union[ - metric_service.GetMonitoredResourceDescriptorRequest, dict + request: Optional[ + Union[metric_service.GetMonitoredResourceDescriptorRequest, dict] ] = None, *, - name: str = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> monitored_resource_pb2.MonitoredResourceDescriptor: r"""Gets a single monitored resource descriptor. This @@ -714,11 +727,13 @@ def sample_get_monitored_resource_descriptor(): def list_metric_descriptors( self, - request: Union[metric_service.ListMetricDescriptorsRequest, dict] = None, + request: Optional[ + Union[metric_service.ListMetricDescriptorsRequest, dict] + ] = None, *, - name: str = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListMetricDescriptorsPager: r"""Lists metric descriptors that match a filter. This @@ -833,11 +848,13 @@ def sample_list_metric_descriptors(): def get_metric_descriptor( self, - request: Union[metric_service.GetMetricDescriptorRequest, dict] = None, + request: Optional[ + Union[metric_service.GetMetricDescriptorRequest, dict] + ] = None, *, - name: str = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> metric_pb2.MetricDescriptor: r"""Gets a single metric descriptor. This method does not @@ -945,12 +962,14 @@ def sample_get_metric_descriptor(): def create_metric_descriptor( self, - request: Union[metric_service.CreateMetricDescriptorRequest, dict] = None, + request: Optional[ + Union[metric_service.CreateMetricDescriptorRequest, dict] + ] = None, *, - name: str = None, - metric_descriptor: metric_pb2.MetricDescriptor = None, + name: Optional[str] = None, + metric_descriptor: Optional[metric_pb2.MetricDescriptor] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> metric_pb2.MetricDescriptor: r"""Creates a new metric descriptor. 
The creation is executed @@ -1067,11 +1086,13 @@ def sample_create_metric_descriptor(): def delete_metric_descriptor( self, - request: Union[metric_service.DeleteMetricDescriptorRequest, dict] = None, + request: Optional[ + Union[metric_service.DeleteMetricDescriptorRequest, dict] + ] = None, *, - name: str = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Deletes a metric descriptor. Only user-created `custom @@ -1166,14 +1187,14 @@ def sample_delete_metric_descriptor(): def list_time_series( self, - request: Union[metric_service.ListTimeSeriesRequest, dict] = None, + request: Optional[Union[metric_service.ListTimeSeriesRequest, dict]] = None, *, - name: str = None, - filter: str = None, - interval: common.TimeInterval = None, - view: metric_service.ListTimeSeriesRequest.TimeSeriesView = None, + name: Optional[str] = None, + filter: Optional[str] = None, + interval: Optional[common.TimeInterval] = None, + view: Optional[metric_service.ListTimeSeriesRequest.TimeSeriesView] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListTimeSeriesPager: r"""Lists time series that match a filter. This method @@ -1332,12 +1353,12 @@ def sample_list_time_series(): def create_time_series( self, - request: Union[metric_service.CreateTimeSeriesRequest, dict] = None, + request: Optional[Union[metric_service.CreateTimeSeriesRequest, dict]] = None, *, - name: str = None, - time_series: Sequence[gm_metric.TimeSeries] = None, + name: Optional[str] = None, + time_series: Optional[MutableSequence[gm_metric.TimeSeries]] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Creates or adds data to one or more time series. @@ -1384,7 +1405,7 @@ def sample_create_time_series(): This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - time_series (Sequence[google.cloud.monitoring_v3.types.TimeSeries]): + time_series (MutableSequence[google.cloud.monitoring_v3.types.TimeSeries]): Required. The new data to be added to a list of time series. Adds at most one data point to each of several time series. The new data point must be more recent than @@ -1448,12 +1469,12 @@ def sample_create_time_series(): def create_service_time_series( self, - request: Union[metric_service.CreateTimeSeriesRequest, dict] = None, + request: Optional[Union[metric_service.CreateTimeSeriesRequest, dict]] = None, *, - name: str = None, - time_series: Sequence[gm_metric.TimeSeries] = None, + name: Optional[str] = None, + time_series: Optional[MutableSequence[gm_metric.TimeSeries]] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Creates or adds data to one or more service time series. A @@ -1504,7 +1525,7 @@ def sample_create_service_time_series(): This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. 
- time_series (Sequence[google.cloud.monitoring_v3.types.TimeSeries]): + time_series (MutableSequence[google.cloud.monitoring_v3.types.TimeSeries]): Required. The new data to be added to a list of time series. Adds at most one data point to each of several time series. The new data point must be more recent than @@ -1582,14 +1603,9 @@ def __exit__(self, type, value, traceback): self.transport.close() -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-monitoring", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) __all__ = ("MetricServiceClient",) diff --git a/google/cloud/monitoring_v3/services/metric_service/transports/base.py b/google/cloud/monitoring_v3/services/metric_service/transports/base.py index 1e4922d1..0c7bfc6e 100644 --- a/google/cloud/monitoring_v3/services/metric_service/transports/base.py +++ b/google/cloud/monitoring_v3/services/metric_service/transports/base.py @@ -15,7 +15,8 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import pkg_resources + +from google.cloud.monitoring_v3 import gapic_version as package_version import google.auth # type: ignore import google.api_core @@ -30,14 +31,9 @@ from google.cloud.monitoring_v3.types import metric_service from google.protobuf import empty_pb2 # type: ignore -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-monitoring", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) class MetricServiceTransport(abc.ABC): @@ -56,7 +52,7 @@ def __init__( self, *, host: str = DEFAULT_HOST, - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, diff --git a/google/cloud/monitoring_v3/services/metric_service/transports/grpc.py b/google/cloud/monitoring_v3/services/metric_service/transports/grpc.py index 0b9e02d6..d8bb6457 100644 --- a/google/cloud/monitoring_v3/services/metric_service/transports/grpc.py +++ b/google/cloud/monitoring_v3/services/metric_service/transports/grpc.py @@ -51,14 +51,14 @@ def __init__( self, *, host: str = "monitoring.googleapis.com", - credentials: ga_credentials.Credentials = None, - credentials_file: str = None, - scopes: Sequence[str] = None, - channel: grpc.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - ssl_channel_credentials: grpc.ChannelCredentials = None, - client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, quota_project_id: Optional[str] = None, client_info: 
gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, @@ -185,8 +185,8 @@ def __init__( def create_channel( cls, host: str = "monitoring.googleapis.com", - credentials: ga_credentials.Credentials = None, - credentials_file: str = None, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, **kwargs, diff --git a/google/cloud/monitoring_v3/services/metric_service/transports/grpc_asyncio.py b/google/cloud/monitoring_v3/services/metric_service/transports/grpc_asyncio.py index 3d7fda0e..6bb1bec2 100644 --- a/google/cloud/monitoring_v3/services/metric_service/transports/grpc_asyncio.py +++ b/google/cloud/monitoring_v3/services/metric_service/transports/grpc_asyncio.py @@ -53,7 +53,7 @@ class MetricServiceGrpcAsyncIOTransport(MetricServiceTransport): def create_channel( cls, host: str = "monitoring.googleapis.com", - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -96,15 +96,15 @@ def __init__( self, *, host: str = "monitoring.googleapis.com", - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, - channel: aio.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - ssl_channel_credentials: grpc.ChannelCredentials = None, - client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, - quota_project_id=None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, api_audience: Optional[str] = None, diff --git a/google/cloud/monitoring_v3/services/notification_channel_service/async_client.py b/google/cloud/monitoring_v3/services/notification_channel_service/async_client.py index 0b19fb28..96e9ea01 100644 --- a/google/cloud/monitoring_v3/services/notification_channel_service/async_client.py +++ b/google/cloud/monitoring_v3/services/notification_channel_service/async_client.py @@ -16,8 +16,19 @@ from collections import OrderedDict import functools import re -from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union -import pkg_resources +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.cloud.monitoring_v3 import gapic_version as package_version from google.api_core.client_options import ClientOptions from google.api_core import exceptions as core_exceptions @@ -185,9 +196,9 @@ def transport(self) -> NotificationChannelServiceTransport: def __init__( self, *, - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, transport: Union[str, NotificationChannelServiceTransport] = "grpc_asyncio", - client_options: ClientOptions = None, + client_options: Optional[ClientOptions] = None, 
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiates the notification channel service client. @@ -231,13 +242,13 @@ def __init__( async def list_notification_channel_descriptors( self, - request: Union[ - notification_service.ListNotificationChannelDescriptorsRequest, dict + request: Optional[ + Union[notification_service.ListNotificationChannelDescriptorsRequest, dict] ] = None, *, - name: str = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListNotificationChannelDescriptorsAsyncPager: r"""Lists the descriptors for supported channel types. @@ -272,7 +283,7 @@ async def sample_list_notification_channel_descriptors(): print(response) Args: - request (Union[google.cloud.monitoring_v3.types.ListNotificationChannelDescriptorsRequest, dict]): + request (Optional[Union[google.cloud.monitoring_v3.types.ListNotificationChannelDescriptorsRequest, dict]]): The request object. The `ListNotificationChannelDescriptors` request. name (:class:`str`): @@ -373,13 +384,13 @@ async def sample_list_notification_channel_descriptors(): async def get_notification_channel_descriptor( self, - request: Union[ - notification_service.GetNotificationChannelDescriptorRequest, dict + request: Optional[ + Union[notification_service.GetNotificationChannelDescriptorRequest, dict] ] = None, *, - name: str = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> notification.NotificationChannelDescriptor: r"""Gets a single channel descriptor. The descriptor @@ -413,7 +424,7 @@ async def sample_get_notification_channel_descriptor(): print(response) Args: - request (Union[google.cloud.monitoring_v3.types.GetNotificationChannelDescriptorRequest, dict]): + request (Optional[Union[google.cloud.monitoring_v3.types.GetNotificationChannelDescriptorRequest, dict]]): The request object. The `GetNotificationChannelDescriptor` response. name (:class:`str`): @@ -495,13 +506,13 @@ async def sample_get_notification_channel_descriptor(): async def list_notification_channels( self, - request: Union[ - notification_service.ListNotificationChannelsRequest, dict + request: Optional[ + Union[notification_service.ListNotificationChannelsRequest, dict] ] = None, *, - name: str = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListNotificationChannelsAsyncPager: r"""Lists the notification channels that have been @@ -535,7 +546,7 @@ async def sample_list_notification_channels(): print(response) Args: - request (Union[google.cloud.monitoring_v3.types.ListNotificationChannelsRequest, dict]): + request (Optional[Union[google.cloud.monitoring_v3.types.ListNotificationChannelsRequest, dict]]): The request object. The `ListNotificationChannels` request. 
name (:class:`str`): @@ -633,11 +644,13 @@ async def sample_list_notification_channels(): async def get_notification_channel( self, - request: Union[notification_service.GetNotificationChannelRequest, dict] = None, + request: Optional[ + Union[notification_service.GetNotificationChannelRequest, dict] + ] = None, *, - name: str = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> notification.NotificationChannel: r"""Gets a single notification channel. The channel @@ -675,7 +688,7 @@ async def sample_get_notification_channel(): print(response) Args: - request (Union[google.cloud.monitoring_v3.types.GetNotificationChannelRequest, dict]): + request (Optional[Union[google.cloud.monitoring_v3.types.GetNotificationChannelRequest, dict]]): The request object. The `GetNotificationChannel` request. name (:class:`str`): @@ -759,14 +772,14 @@ async def sample_get_notification_channel(): async def create_notification_channel( self, - request: Union[ - notification_service.CreateNotificationChannelRequest, dict + request: Optional[ + Union[notification_service.CreateNotificationChannelRequest, dict] ] = None, *, - name: str = None, - notification_channel: notification.NotificationChannel = None, + name: Optional[str] = None, + notification_channel: Optional[notification.NotificationChannel] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> notification.NotificationChannel: r"""Creates a new notification channel, representing a @@ -800,7 +813,7 @@ async def sample_create_notification_channel(): print(response) Args: - request (Union[google.cloud.monitoring_v3.types.CreateNotificationChannelRequest, dict]): + request (Optional[Union[google.cloud.monitoring_v3.types.CreateNotificationChannelRequest, dict]]): The request object. The `CreateNotificationChannel` request. name (:class:`str`): @@ -892,14 +905,14 @@ async def sample_create_notification_channel(): async def update_notification_channel( self, - request: Union[ - notification_service.UpdateNotificationChannelRequest, dict + request: Optional[ + Union[notification_service.UpdateNotificationChannelRequest, dict] ] = None, *, - update_mask: field_mask_pb2.FieldMask = None, - notification_channel: notification.NotificationChannel = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + notification_channel: Optional[notification.NotificationChannel] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> notification.NotificationChannel: r"""Updates a notification channel. Fields not specified @@ -931,7 +944,7 @@ async def sample_update_notification_channel(): print(response) Args: - request (Union[google.cloud.monitoring_v3.types.UpdateNotificationChannelRequest, dict]): + request (Optional[Union[google.cloud.monitoring_v3.types.UpdateNotificationChannelRequest, dict]]): The request object. The `UpdateNotificationChannel` request. 
update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): @@ -1014,14 +1027,14 @@ async def sample_update_notification_channel(): async def delete_notification_channel( self, - request: Union[ - notification_service.DeleteNotificationChannelRequest, dict + request: Optional[ + Union[notification_service.DeleteNotificationChannelRequest, dict] ] = None, *, - name: str = None, - force: bool = None, + name: Optional[str] = None, + force: Optional[bool] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Deletes a notification channel. @@ -1050,7 +1063,7 @@ async def sample_delete_notification_channel(): await client.delete_notification_channel(request=request) Args: - request (Union[google.cloud.monitoring_v3.types.DeleteNotificationChannelRequest, dict]): + request (Optional[Union[google.cloud.monitoring_v3.types.DeleteNotificationChannelRequest, dict]]): The request object. The `DeleteNotificationChannel` request. name (:class:`str`): @@ -1135,13 +1148,16 @@ async def sample_delete_notification_channel(): async def send_notification_channel_verification_code( self, - request: Union[ - notification_service.SendNotificationChannelVerificationCodeRequest, dict + request: Optional[ + Union[ + notification_service.SendNotificationChannelVerificationCodeRequest, + dict, + ] ] = None, *, - name: str = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Causes a verification code to be delivered to the channel. The @@ -1172,7 +1188,7 @@ async def sample_send_notification_channel_verification_code(): await client.send_notification_channel_verification_code(request=request) Args: - request (Union[google.cloud.monitoring_v3.types.SendNotificationChannelVerificationCodeRequest, dict]): + request (Optional[Union[google.cloud.monitoring_v3.types.SendNotificationChannelVerificationCodeRequest, dict]]): The request object. The `SendNotificationChannelVerificationCode` request. name (:class:`str`): @@ -1231,13 +1247,15 @@ async def sample_send_notification_channel_verification_code(): async def get_notification_channel_verification_code( self, - request: Union[ - notification_service.GetNotificationChannelVerificationCodeRequest, dict + request: Optional[ + Union[ + notification_service.GetNotificationChannelVerificationCodeRequest, dict + ] ] = None, *, - name: str = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> notification_service.GetNotificationChannelVerificationCodeResponse: r"""Requests a verification code for an already verified @@ -1296,7 +1314,7 @@ async def sample_get_notification_channel_verification_code(): print(response) Args: - request (Union[google.cloud.monitoring_v3.types.GetNotificationChannelVerificationCodeRequest, dict]): + request (Optional[Union[google.cloud.monitoring_v3.types.GetNotificationChannelVerificationCodeRequest, dict]]): The request object. The `GetNotificationChannelVerificationCode` request. 
name (:class:`str`): @@ -1375,14 +1393,14 @@ async def sample_get_notification_channel_verification_code(): async def verify_notification_channel( self, - request: Union[ - notification_service.VerifyNotificationChannelRequest, dict + request: Optional[ + Union[notification_service.VerifyNotificationChannelRequest, dict] ] = None, *, - name: str = None, - code: str = None, + name: Optional[str] = None, + code: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> notification.NotificationChannel: r"""Verifies a ``NotificationChannel`` by proving receipt of the @@ -1417,7 +1435,7 @@ async def sample_verify_notification_channel(): print(response) Args: - request (Union[google.cloud.monitoring_v3.types.VerifyNotificationChannelRequest, dict]): + request (Optional[Union[google.cloud.monitoring_v3.types.VerifyNotificationChannelRequest, dict]]): The request object. The `VerifyNotificationChannel` request. name (:class:`str`): @@ -1518,14 +1536,9 @@ async def __aexit__(self, exc_type, exc, tb): await self.transport.close() -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-monitoring", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) __all__ = ("NotificationChannelServiceAsyncClient",) diff --git a/google/cloud/monitoring_v3/services/notification_channel_service/client.py b/google/cloud/monitoring_v3/services/notification_channel_service/client.py index 64360045..b1987573 100644 --- a/google/cloud/monitoring_v3/services/notification_channel_service/client.py +++ b/google/cloud/monitoring_v3/services/notification_channel_service/client.py @@ -16,8 +16,20 @@ from collections import OrderedDict import os import re -from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union -import pkg_resources +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +from google.cloud.monitoring_v3 import gapic_version as package_version from google.api_core import client_options as client_options_lib from google.api_core import exceptions as core_exceptions @@ -65,7 +77,7 @@ class NotificationChannelServiceClientMeta(type): def get_transport_class( cls, - label: str = None, + label: Optional[str] = None, ) -> Type[NotificationChannelServiceTransport]: """Returns an appropriate transport class. @@ -360,8 +372,8 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, NotificationChannelServiceTransport, None] = None, - client_options: Optional[client_options_lib.ClientOptions] = None, + transport: Optional[Union[str, NotificationChannelServiceTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiates the notification channel service client. @@ -375,7 +387,7 @@ def __init__( transport (Union[str, NotificationChannelServiceTransport]): The transport to use. If set to None, a transport is chosen automatically. 
- client_options (google.api_core.client_options.ClientOptions): Custom options for the + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the client. It won't take effect if a ``transport`` instance is provided. (1) The ``api_endpoint`` property can be used to override the default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT @@ -405,6 +417,7 @@ def __init__( client_options = client_options_lib.from_dict(client_options) if client_options is None: client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( client_options @@ -457,13 +470,13 @@ def __init__( def list_notification_channel_descriptors( self, - request: Union[ - notification_service.ListNotificationChannelDescriptorsRequest, dict + request: Optional[ + Union[notification_service.ListNotificationChannelDescriptorsRequest, dict] ] = None, *, - name: str = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListNotificationChannelDescriptorsPager: r"""Lists the descriptors for supported channel types. @@ -594,13 +607,13 @@ def sample_list_notification_channel_descriptors(): def get_notification_channel_descriptor( self, - request: Union[ - notification_service.GetNotificationChannelDescriptorRequest, dict + request: Optional[ + Union[notification_service.GetNotificationChannelDescriptorRequest, dict] ] = None, *, - name: str = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> notification.NotificationChannelDescriptor: r"""Gets a single channel descriptor. The descriptor @@ -713,13 +726,13 @@ def sample_get_notification_channel_descriptor(): def list_notification_channels( self, - request: Union[ - notification_service.ListNotificationChannelsRequest, dict + request: Optional[ + Union[notification_service.ListNotificationChannelsRequest, dict] ] = None, *, - name: str = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListNotificationChannelsPager: r"""Lists the notification channels that have been @@ -846,11 +859,13 @@ def sample_list_notification_channels(): def get_notification_channel( self, - request: Union[notification_service.GetNotificationChannelRequest, dict] = None, + request: Optional[ + Union[notification_service.GetNotificationChannelRequest, dict] + ] = None, *, - name: str = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> notification.NotificationChannel: r"""Gets a single notification channel. 
The channel @@ -963,14 +978,14 @@ def sample_get_notification_channel(): def create_notification_channel( self, - request: Union[ - notification_service.CreateNotificationChannelRequest, dict + request: Optional[ + Union[notification_service.CreateNotificationChannelRequest, dict] ] = None, *, - name: str = None, - notification_channel: notification.NotificationChannel = None, + name: Optional[str] = None, + notification_channel: Optional[notification.NotificationChannel] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> notification.NotificationChannel: r"""Creates a new notification channel, representing a @@ -1100,14 +1115,14 @@ def sample_create_notification_channel(): def update_notification_channel( self, - request: Union[ - notification_service.UpdateNotificationChannelRequest, dict + request: Optional[ + Union[notification_service.UpdateNotificationChannelRequest, dict] ] = None, *, - update_mask: field_mask_pb2.FieldMask = None, - notification_channel: notification.NotificationChannel = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + notification_channel: Optional[notification.NotificationChannel] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> notification.NotificationChannel: r"""Updates a notification channel. Fields not specified @@ -1226,14 +1241,14 @@ def sample_update_notification_channel(): def delete_notification_channel( self, - request: Union[ - notification_service.DeleteNotificationChannelRequest, dict + request: Optional[ + Union[notification_service.DeleteNotificationChannelRequest, dict] ] = None, *, - name: str = None, - force: bool = None, + name: Optional[str] = None, + force: Optional[bool] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Deletes a notification channel. @@ -1342,13 +1357,16 @@ def sample_delete_notification_channel(): def send_notification_channel_verification_code( self, - request: Union[ - notification_service.SendNotificationChannelVerificationCodeRequest, dict + request: Optional[ + Union[ + notification_service.SendNotificationChannelVerificationCodeRequest, + dict, + ] ] = None, *, - name: str = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Causes a verification code to be delivered to the channel. 
The @@ -1444,13 +1462,15 @@ def sample_send_notification_channel_verification_code(): def get_notification_channel_verification_code( self, - request: Union[ - notification_service.GetNotificationChannelVerificationCodeRequest, dict + request: Optional[ + Union[ + notification_service.GetNotificationChannelVerificationCodeRequest, dict + ] ] = None, *, - name: str = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> notification_service.GetNotificationChannelVerificationCodeResponse: r"""Requests a verification code for an already verified @@ -1585,14 +1605,14 @@ def sample_get_notification_channel_verification_code(): def verify_notification_channel( self, - request: Union[ - notification_service.VerifyNotificationChannelRequest, dict + request: Optional[ + Union[notification_service.VerifyNotificationChannelRequest, dict] ] = None, *, - name: str = None, - code: str = None, + name: Optional[str] = None, + code: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> notification.NotificationChannel: r"""Verifies a ``NotificationChannel`` by proving receipt of the @@ -1730,14 +1750,9 @@ def __exit__(self, type, value, traceback): self.transport.close() -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-monitoring", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) __all__ = ("NotificationChannelServiceClient",) diff --git a/google/cloud/monitoring_v3/services/notification_channel_service/transports/base.py b/google/cloud/monitoring_v3/services/notification_channel_service/transports/base.py index a76bee6c..914074eb 100644 --- a/google/cloud/monitoring_v3/services/notification_channel_service/transports/base.py +++ b/google/cloud/monitoring_v3/services/notification_channel_service/transports/base.py @@ -15,7 +15,8 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import pkg_resources + +from google.cloud.monitoring_v3 import gapic_version as package_version import google.auth # type: ignore import google.api_core @@ -29,14 +30,9 @@ from google.cloud.monitoring_v3.types import notification_service from google.protobuf import empty_pb2 # type: ignore -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-monitoring", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) class NotificationChannelServiceTransport(abc.ABC): @@ -54,7 +50,7 @@ def __init__( self, *, host: str = DEFAULT_HOST, - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, diff --git a/google/cloud/monitoring_v3/services/notification_channel_service/transports/grpc.py 
b/google/cloud/monitoring_v3/services/notification_channel_service/transports/grpc.py index ee3db7ec..ebe382eb 100644 --- a/google/cloud/monitoring_v3/services/notification_channel_service/transports/grpc.py +++ b/google/cloud/monitoring_v3/services/notification_channel_service/transports/grpc.py @@ -50,14 +50,14 @@ def __init__( self, *, host: str = "monitoring.googleapis.com", - credentials: ga_credentials.Credentials = None, - credentials_file: str = None, - scopes: Sequence[str] = None, - channel: grpc.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - ssl_channel_credentials: grpc.ChannelCredentials = None, - client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, @@ -184,8 +184,8 @@ def __init__( def create_channel( cls, host: str = "monitoring.googleapis.com", - credentials: ga_credentials.Credentials = None, - credentials_file: str = None, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, **kwargs, diff --git a/google/cloud/monitoring_v3/services/notification_channel_service/transports/grpc_asyncio.py b/google/cloud/monitoring_v3/services/notification_channel_service/transports/grpc_asyncio.py index f81f2a95..9a0e4059 100644 --- a/google/cloud/monitoring_v3/services/notification_channel_service/transports/grpc_asyncio.py +++ b/google/cloud/monitoring_v3/services/notification_channel_service/transports/grpc_asyncio.py @@ -54,7 +54,7 @@ class NotificationChannelServiceGrpcAsyncIOTransport( def create_channel( cls, host: str = "monitoring.googleapis.com", - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -97,15 +97,15 @@ def __init__( self, *, host: str = "monitoring.googleapis.com", - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, - channel: aio.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - ssl_channel_credentials: grpc.ChannelCredentials = None, - client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, - quota_project_id=None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = 
False, api_audience: Optional[str] = None, diff --git a/google/cloud/monitoring_v3/services/query_service/async_client.py b/google/cloud/monitoring_v3/services/query_service/async_client.py index 3cc4dacd..68dc784f 100644 --- a/google/cloud/monitoring_v3/services/query_service/async_client.py +++ b/google/cloud/monitoring_v3/services/query_service/async_client.py @@ -16,8 +16,19 @@ from collections import OrderedDict import functools import re -from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union -import pkg_resources +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.cloud.monitoring_v3 import gapic_version as package_version from google.api_core.client_options import ClientOptions from google.api_core import exceptions as core_exceptions @@ -156,9 +167,9 @@ def transport(self) -> QueryServiceTransport: def __init__( self, *, - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, transport: Union[str, QueryServiceTransport] = "grpc_asyncio", - client_options: ClientOptions = None, + client_options: Optional[ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiates the query service client. @@ -202,10 +213,10 @@ def __init__( async def query_time_series( self, - request: Union[metric_service.QueryTimeSeriesRequest, dict] = None, + request: Optional[Union[metric_service.QueryTimeSeriesRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.QueryTimeSeriesAsyncPager: r"""Queries time series using Monitoring Query Language. @@ -240,7 +251,7 @@ async def sample_query_time_series(): print(response) Args: - request (Union[google.cloud.monitoring_v3.types.QueryTimeSeriesRequest, dict]): + request (Optional[Union[google.cloud.monitoring_v3.types.QueryTimeSeriesRequest, dict]]): The request object. The `QueryTimeSeries` request. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. 
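Throughout these hunks the bare ``timeout: float = None`` default becomes ``timeout: Union[float, object] = gapic_v1.method.DEFAULT``, so an unspecified timeout is distinguishable from an explicitly passed value. A minimal sketch of that sentinel-default pattern, using stand-in names rather than the real gapic internals:

    from typing import Union

    class _Sentinel:
        """Stand-in for gapic_v1.method.DEFAULT: use the configured per-method timeout."""

    DEFAULT = _Sentinel()

    def query(timeout: Union[float, object] = DEFAULT, configured_default: float = 30.0):
        # The sentinel falls back to the configured default; anything the caller
        # passes explicitly (a float, or None) is forwarded as given.
        return configured_default if timeout is DEFAULT else timeout

    assert query() == 30.0
    assert query(timeout=5.0) == 5.0
    assert query(timeout=None) is None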
@@ -300,14 +311,9 @@ async def __aexit__(self, exc_type, exc, tb): await self.transport.close() -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-monitoring", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) __all__ = ("QueryServiceAsyncClient",) diff --git a/google/cloud/monitoring_v3/services/query_service/client.py b/google/cloud/monitoring_v3/services/query_service/client.py index 1bf2b78f..969bed72 100644 --- a/google/cloud/monitoring_v3/services/query_service/client.py +++ b/google/cloud/monitoring_v3/services/query_service/client.py @@ -16,8 +16,20 @@ from collections import OrderedDict import os import re -from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union -import pkg_resources +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +from google.cloud.monitoring_v3 import gapic_version as package_version from google.api_core import client_options as client_options_lib from google.api_core import exceptions as core_exceptions @@ -56,7 +68,7 @@ class QueryServiceClientMeta(type): def get_transport_class( cls, - label: str = None, + label: Optional[str] = None, ) -> Type[QueryServiceTransport]: """Returns an appropriate transport class. @@ -312,8 +324,8 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, QueryServiceTransport, None] = None, - client_options: Optional[client_options_lib.ClientOptions] = None, + transport: Optional[Union[str, QueryServiceTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiates the query service client. @@ -327,7 +339,7 @@ def __init__( transport (Union[str, QueryServiceTransport]): The transport to use. If set to None, a transport is chosen automatically. - client_options (google.api_core.client_options.ClientOptions): Custom options for the + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the client. It won't take effect if a ``transport`` instance is provided. (1) The ``api_endpoint`` property can be used to override the default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT @@ -357,6 +369,7 @@ def __init__( client_options = client_options_lib.from_dict(client_options) if client_options is None: client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( client_options @@ -409,10 +422,10 @@ def __init__( def query_time_series( self, - request: Union[metric_service.QueryTimeSeriesRequest, dict] = None, + request: Optional[Union[metric_service.QueryTimeSeriesRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.QueryTimeSeriesPager: r"""Queries time series using Monitoring Query Language. 
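Every client and transport module here swaps the pkg_resources lookup for ``package_version.__version__``, which assumes a generated ``gapic_version.py`` inside the package; a rough sketch of that shape (the version string is a placeholder, not the actual release):

    # google/cloud/monitoring_v3/gapic_version.py -- shape only, the real file is generated
    __version__ = "0.0.0"  # placeholder

    # Consumer side, as in the hunks above: the client info is built at import
    # time with no runtime dependency on pkg_resources or an installed dist.
    from google.api_core import gapic_v1
    from google.cloud.monitoring_v3 import gapic_version as package_version

    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
        gapic_version=package_version.__version__
    )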
@@ -515,14 +528,9 @@ def __exit__(self, type, value, traceback): self.transport.close() -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-monitoring", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) __all__ = ("QueryServiceClient",) diff --git a/google/cloud/monitoring_v3/services/query_service/transports/base.py b/google/cloud/monitoring_v3/services/query_service/transports/base.py index efddb1b8..e60ced7c 100644 --- a/google/cloud/monitoring_v3/services/query_service/transports/base.py +++ b/google/cloud/monitoring_v3/services/query_service/transports/base.py @@ -15,7 +15,8 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import pkg_resources + +from google.cloud.monitoring_v3 import gapic_version as package_version import google.auth # type: ignore import google.api_core @@ -27,14 +28,9 @@ from google.cloud.monitoring_v3.types import metric_service -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-monitoring", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) class QueryServiceTransport(abc.ABC): @@ -52,7 +48,7 @@ def __init__( self, *, host: str = DEFAULT_HOST, - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, diff --git a/google/cloud/monitoring_v3/services/query_service/transports/grpc.py b/google/cloud/monitoring_v3/services/query_service/transports/grpc.py index e5c0496c..2e85015e 100644 --- a/google/cloud/monitoring_v3/services/query_service/transports/grpc.py +++ b/google/cloud/monitoring_v3/services/query_service/transports/grpc.py @@ -49,14 +49,14 @@ def __init__( self, *, host: str = "monitoring.googleapis.com", - credentials: ga_credentials.Credentials = None, - credentials_file: str = None, - scopes: Sequence[str] = None, - channel: grpc.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - ssl_channel_credentials: grpc.ChannelCredentials = None, - client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, @@ -183,8 +183,8 @@ def __init__( def create_channel( cls, host: str = "monitoring.googleapis.com", - credentials: ga_credentials.Credentials = None, - credentials_file: str = None, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, scopes: 
Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, **kwargs, diff --git a/google/cloud/monitoring_v3/services/query_service/transports/grpc_asyncio.py b/google/cloud/monitoring_v3/services/query_service/transports/grpc_asyncio.py index 87058273..55620ee7 100644 --- a/google/cloud/monitoring_v3/services/query_service/transports/grpc_asyncio.py +++ b/google/cloud/monitoring_v3/services/query_service/transports/grpc_asyncio.py @@ -51,7 +51,7 @@ class QueryServiceGrpcAsyncIOTransport(QueryServiceTransport): def create_channel( cls, host: str = "monitoring.googleapis.com", - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -94,15 +94,15 @@ def __init__( self, *, host: str = "monitoring.googleapis.com", - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, - channel: aio.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - ssl_channel_credentials: grpc.ChannelCredentials = None, - client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, - quota_project_id=None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, api_audience: Optional[str] = None, diff --git a/google/cloud/monitoring_v3/services/service_monitoring_service/async_client.py b/google/cloud/monitoring_v3/services/service_monitoring_service/async_client.py index cce25848..3f30e62b 100644 --- a/google/cloud/monitoring_v3/services/service_monitoring_service/async_client.py +++ b/google/cloud/monitoring_v3/services/service_monitoring_service/async_client.py @@ -16,8 +16,19 @@ from collections import OrderedDict import functools import re -from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union -import pkg_resources +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.cloud.monitoring_v3 import gapic_version as package_version from google.api_core.client_options import ClientOptions from google.api_core import exceptions as core_exceptions @@ -177,9 +188,9 @@ def transport(self) -> ServiceMonitoringServiceTransport: def __init__( self, *, - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, transport: Union[str, ServiceMonitoringServiceTransport] = "grpc_asyncio", - client_options: ClientOptions = None, + client_options: Optional[ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiates the service monitoring service client. 
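Most of the transport hunks only make implicit-Optional defaults explicit: under PEP 484, ``credentials: ga_credentials.Credentials = None`` relies on implicit Optional, which recent type checkers (mypy 0.990+ by default) reject. A small before/after sketch with stand-in types:

    from typing import Optional, Sequence

    # Before (implicit Optional, now flagged by default):
    #     def create_channel(credentials: Credentials = None, scopes: Sequence[str] = None): ...

    # After: the None default is spelled out; runtime behavior is unchanged.
    def create_channel(
        host: str = "monitoring.googleapis.com",
        credentials: Optional[object] = None,   # stand-in for ga_credentials.Credentials
        scopes: Optional[Sequence[str]] = None,
        quota_project_id: Optional[str] = None,
    ) -> None:
        ...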
@@ -223,12 +234,12 @@ def __init__( async def create_service( self, - request: Union[service_service.CreateServiceRequest, dict] = None, + request: Optional[Union[service_service.CreateServiceRequest, dict]] = None, *, - parent: str = None, - service: gm_service.Service = None, + parent: Optional[str] = None, + service: Optional[gm_service.Service] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> gm_service.Service: r"""Create a ``Service``. @@ -260,7 +271,7 @@ async def sample_create_service(): print(response) Args: - request (Union[google.cloud.monitoring_v3.types.CreateServiceRequest, dict]): + request (Optional[Union[google.cloud.monitoring_v3.types.CreateServiceRequest, dict]]): The request object. The `CreateService` request. parent (:class:`str`): Required. Resource @@ -341,11 +352,11 @@ async def sample_create_service(): async def get_service( self, - request: Union[service_service.GetServiceRequest, dict] = None, + request: Optional[Union[service_service.GetServiceRequest, dict]] = None, *, - name: str = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> service.Service: r"""Get the named ``Service``. @@ -377,7 +388,7 @@ async def sample_get_service(): print(response) Args: - request (Union[google.cloud.monitoring_v3.types.GetServiceRequest, dict]): + request (Optional[Union[google.cloud.monitoring_v3.types.GetServiceRequest, dict]]): The request object. The `GetService` request. name (:class:`str`): Required. Resource name of the ``Service``. The format @@ -459,11 +470,11 @@ async def sample_get_service(): async def list_services( self, - request: Union[service_service.ListServicesRequest, dict] = None, + request: Optional[Union[service_service.ListServicesRequest, dict]] = None, *, - parent: str = None, + parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListServicesAsyncPager: r"""List ``Service``\ s for this workspace. @@ -496,7 +507,7 @@ async def sample_list_services(): print(response) Args: - request (Union[google.cloud.monitoring_v3.types.ListServicesRequest, dict]): + request (Optional[Union[google.cloud.monitoring_v3.types.ListServicesRequest, dict]]): The request object. The `ListServices` request. parent (:class:`str`): Required. Resource name of the parent containing the @@ -588,11 +599,11 @@ async def sample_list_services(): async def update_service( self, - request: Union[service_service.UpdateServiceRequest, dict] = None, + request: Optional[Union[service_service.UpdateServiceRequest, dict]] = None, *, - service: gm_service.Service = None, + service: Optional[gm_service.Service] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> gm_service.Service: r"""Update this ``Service``. @@ -623,7 +634,7 @@ async def sample_update_service(): print(response) Args: - request (Union[google.cloud.monitoring_v3.types.UpdateServiceRequest, dict]): + request (Optional[Union[google.cloud.monitoring_v3.types.UpdateServiceRequest, dict]]): The request object. The `UpdateService` request. 
service (:class:`google.cloud.monitoring_v3.types.Service`): Required. The ``Service`` to draw updates from. The @@ -694,11 +705,11 @@ async def sample_update_service(): async def delete_service( self, - request: Union[service_service.DeleteServiceRequest, dict] = None, + request: Optional[Union[service_service.DeleteServiceRequest, dict]] = None, *, - name: str = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Soft delete this ``Service``. @@ -727,7 +738,7 @@ async def sample_delete_service(): await client.delete_service(request=request) Args: - request (Union[google.cloud.monitoring_v3.types.DeleteServiceRequest, dict]): + request (Optional[Union[google.cloud.monitoring_v3.types.DeleteServiceRequest, dict]]): The request object. The `DeleteService` request. name (:class:`str`): Required. Resource name of the ``Service`` to delete. @@ -796,12 +807,14 @@ async def sample_delete_service(): async def create_service_level_objective( self, - request: Union[service_service.CreateServiceLevelObjectiveRequest, dict] = None, + request: Optional[ + Union[service_service.CreateServiceLevelObjectiveRequest, dict] + ] = None, *, - parent: str = None, - service_level_objective: service.ServiceLevelObjective = None, + parent: Optional[str] = None, + service_level_objective: Optional[service.ServiceLevelObjective] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> service.ServiceLevelObjective: r"""Create a ``ServiceLevelObjective`` for the given ``Service``. @@ -833,7 +846,7 @@ async def sample_create_service_level_objective(): print(response) Args: - request (Union[google.cloud.monitoring_v3.types.CreateServiceLevelObjectiveRequest, dict]): + request (Optional[Union[google.cloud.monitoring_v3.types.CreateServiceLevelObjectiveRequest, dict]]): The request object. The `CreateServiceLevelObjective` request. parent (:class:`str`): @@ -924,11 +937,13 @@ async def sample_create_service_level_objective(): async def get_service_level_objective( self, - request: Union[service_service.GetServiceLevelObjectiveRequest, dict] = None, + request: Optional[ + Union[service_service.GetServiceLevelObjectiveRequest, dict] + ] = None, *, - name: str = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> service.ServiceLevelObjective: r"""Get a ``ServiceLevelObjective`` by name. @@ -960,7 +975,7 @@ async def sample_get_service_level_objective(): print(response) Args: - request (Union[google.cloud.monitoring_v3.types.GetServiceLevelObjectiveRequest, dict]): + request (Optional[Union[google.cloud.monitoring_v3.types.GetServiceLevelObjectiveRequest, dict]]): The request object. The `GetServiceLevelObjective` request. 
name (:class:`str`): @@ -1050,11 +1065,13 @@ async def sample_get_service_level_objective(): async def list_service_level_objectives( self, - request: Union[service_service.ListServiceLevelObjectivesRequest, dict] = None, + request: Optional[ + Union[service_service.ListServiceLevelObjectivesRequest, dict] + ] = None, *, - parent: str = None, + parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListServiceLevelObjectivesAsyncPager: r"""List the ``ServiceLevelObjective``\ s for the given ``Service``. @@ -1087,7 +1104,7 @@ async def sample_list_service_level_objectives(): print(response) Args: - request (Union[google.cloud.monitoring_v3.types.ListServiceLevelObjectivesRequest, dict]): + request (Optional[Union[google.cloud.monitoring_v3.types.ListServiceLevelObjectivesRequest, dict]]): The request object. The `ListServiceLevelObjectives` request. parent (:class:`str`): @@ -1179,11 +1196,13 @@ async def sample_list_service_level_objectives(): async def update_service_level_objective( self, - request: Union[service_service.UpdateServiceLevelObjectiveRequest, dict] = None, + request: Optional[ + Union[service_service.UpdateServiceLevelObjectiveRequest, dict] + ] = None, *, - service_level_objective: service.ServiceLevelObjective = None, + service_level_objective: Optional[service.ServiceLevelObjective] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> service.ServiceLevelObjective: r"""Update the given ``ServiceLevelObjective``. @@ -1214,7 +1233,7 @@ async def sample_update_service_level_objective(): print(response) Args: - request (Union[google.cloud.monitoring_v3.types.UpdateServiceLevelObjectiveRequest, dict]): + request (Optional[Union[google.cloud.monitoring_v3.types.UpdateServiceLevelObjectiveRequest, dict]]): The request object. The `UpdateServiceLevelObjective` request. service_level_objective (:class:`google.cloud.monitoring_v3.types.ServiceLevelObjective`): @@ -1299,11 +1318,13 @@ async def sample_update_service_level_objective(): async def delete_service_level_objective( self, - request: Union[service_service.DeleteServiceLevelObjectiveRequest, dict] = None, + request: Optional[ + Union[service_service.DeleteServiceLevelObjectiveRequest, dict] + ] = None, *, - name: str = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Delete the given ``ServiceLevelObjective``. @@ -1332,7 +1353,7 @@ async def sample_delete_service_level_objective(): await client.delete_service_level_objective(request=request) Args: - request (Union[google.cloud.monitoring_v3.types.DeleteServiceLevelObjectiveRequest, dict]): + request (Optional[Union[google.cloud.monitoring_v3.types.DeleteServiceLevelObjectiveRequest, dict]]): The request object. The `DeleteServiceLevelObjective` request. 
name (:class:`str`): @@ -1407,14 +1428,9 @@ async def __aexit__(self, exc_type, exc, tb): await self.transport.close() -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-monitoring", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) __all__ = ("ServiceMonitoringServiceAsyncClient",) diff --git a/google/cloud/monitoring_v3/services/service_monitoring_service/client.py b/google/cloud/monitoring_v3/services/service_monitoring_service/client.py index 215b4962..6d33ae2a 100644 --- a/google/cloud/monitoring_v3/services/service_monitoring_service/client.py +++ b/google/cloud/monitoring_v3/services/service_monitoring_service/client.py @@ -16,8 +16,20 @@ from collections import OrderedDict import os import re -from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union -import pkg_resources +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +from google.cloud.monitoring_v3 import gapic_version as package_version from google.api_core import client_options as client_options_lib from google.api_core import exceptions as core_exceptions @@ -61,7 +73,7 @@ class ServiceMonitoringServiceClientMeta(type): def get_transport_class( cls, - label: str = None, + label: Optional[str] = None, ) -> Type[ServiceMonitoringServiceTransport]: """Returns an appropriate transport class. @@ -357,8 +369,8 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, ServiceMonitoringServiceTransport, None] = None, - client_options: Optional[client_options_lib.ClientOptions] = None, + transport: Optional[Union[str, ServiceMonitoringServiceTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiates the service monitoring service client. @@ -372,7 +384,7 @@ def __init__( transport (Union[str, ServiceMonitoringServiceTransport]): The transport to use. If set to None, a transport is chosen automatically. - client_options (google.api_core.client_options.ClientOptions): Custom options for the + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the client. It won't take effect if a ``transport`` instance is provided. (1) The ``api_endpoint`` property can be used to override the default endpoint provided by the client. 
GOOGLE_API_USE_MTLS_ENDPOINT @@ -402,6 +414,7 @@ def __init__( client_options = client_options_lib.from_dict(client_options) if client_options is None: client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( client_options @@ -454,12 +467,12 @@ def __init__( def create_service( self, - request: Union[service_service.CreateServiceRequest, dict] = None, + request: Optional[Union[service_service.CreateServiceRequest, dict]] = None, *, - parent: str = None, - service: gm_service.Service = None, + parent: Optional[str] = None, + service: Optional[gm_service.Service] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> gm_service.Service: r"""Create a ``Service``. @@ -572,11 +585,11 @@ def sample_create_service(): def get_service( self, - request: Union[service_service.GetServiceRequest, dict] = None, + request: Optional[Union[service_service.GetServiceRequest, dict]] = None, *, - name: str = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> service.Service: r"""Get the named ``Service``. @@ -681,11 +694,11 @@ def sample_get_service(): def list_services( self, - request: Union[service_service.ListServicesRequest, dict] = None, + request: Optional[Union[service_service.ListServicesRequest, dict]] = None, *, - parent: str = None, + parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListServicesPager: r"""List ``Service``\ s for this workspace. @@ -801,11 +814,11 @@ def sample_list_services(): def update_service( self, - request: Union[service_service.UpdateServiceRequest, dict] = None, + request: Optional[Union[service_service.UpdateServiceRequest, dict]] = None, *, - service: gm_service.Service = None, + service: Optional[gm_service.Service] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> gm_service.Service: r"""Update this ``Service``. @@ -907,11 +920,11 @@ def sample_update_service(): def delete_service( self, - request: Union[service_service.DeleteServiceRequest, dict] = None, + request: Optional[Union[service_service.DeleteServiceRequest, dict]] = None, *, - name: str = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Soft delete this ``Service``. 
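The added ``cast(client_options_lib.ClientOptions, client_options)`` only narrows the type after the dict form has been converted; a condensed sketch of that normalization step, pulled out of the constructor for illustration:

    from typing import Optional, Union, cast

    from google.api_core import client_options as client_options_lib

    def _normalize(
        client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None,
    ) -> client_options_lib.ClientOptions:
        if isinstance(client_options, dict):
            client_options = client_options_lib.from_dict(client_options)
        if client_options is None:
            client_options = client_options_lib.ClientOptions()
        # At this point it is always a ClientOptions instance; cast() merely
        # records that for the type checker and does nothing at runtime.
        return cast(client_options_lib.ClientOptions, client_options)

    opts = _normalize({"api_endpoint": "monitoring.googleapis.com"})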
@@ -1000,12 +1013,14 @@ def sample_delete_service(): def create_service_level_objective( self, - request: Union[service_service.CreateServiceLevelObjectiveRequest, dict] = None, + request: Optional[ + Union[service_service.CreateServiceLevelObjectiveRequest, dict] + ] = None, *, - parent: str = None, - service_level_objective: service.ServiceLevelObjective = None, + parent: Optional[str] = None, + service_level_objective: Optional[service.ServiceLevelObjective] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> service.ServiceLevelObjective: r"""Create a ``ServiceLevelObjective`` for the given ``Service``. @@ -1130,11 +1145,13 @@ def sample_create_service_level_objective(): def get_service_level_objective( self, - request: Union[service_service.GetServiceLevelObjectiveRequest, dict] = None, + request: Optional[ + Union[service_service.GetServiceLevelObjectiveRequest, dict] + ] = None, *, - name: str = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> service.ServiceLevelObjective: r"""Get a ``ServiceLevelObjective`` by name. @@ -1249,11 +1266,13 @@ def sample_get_service_level_objective(): def list_service_level_objectives( self, - request: Union[service_service.ListServiceLevelObjectivesRequest, dict] = None, + request: Optional[ + Union[service_service.ListServiceLevelObjectivesRequest, dict] + ] = None, *, - parent: str = None, + parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListServiceLevelObjectivesPager: r"""List the ``ServiceLevelObjective``\ s for the given ``Service``. @@ -1371,11 +1390,13 @@ def sample_list_service_level_objectives(): def update_service_level_objective( self, - request: Union[service_service.UpdateServiceLevelObjectiveRequest, dict] = None, + request: Optional[ + Union[service_service.UpdateServiceLevelObjectiveRequest, dict] + ] = None, *, - service_level_objective: service.ServiceLevelObjective = None, + service_level_objective: Optional[service.ServiceLevelObjective] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> service.ServiceLevelObjective: r"""Update the given ``ServiceLevelObjective``. @@ -1493,11 +1514,13 @@ def sample_update_service_level_objective(): def delete_service_level_objective( self, - request: Union[service_service.DeleteServiceLevelObjectiveRequest, dict] = None, + request: Optional[ + Union[service_service.DeleteServiceLevelObjectiveRequest, dict] + ] = None, *, - name: str = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Delete the given ``ServiceLevelObjective``. 
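Because each request parameter is now ``Optional`` with a ``None`` default, the two established call styles remain interchangeable; a usage sketch for the method above, with placeholder resource names:

    from google.cloud import monitoring_v3

    client = monitoring_v3.ServiceMonitoringServiceClient()
    slo_name = (  # placeholder resource name
        "projects/my-project/services/my-service/serviceLevelObjectives/my-slo"
    )

    # Flattened keyword argument:
    client.delete_service_level_objective(name=slo_name)

    # Equivalent request object (a plain dict is also accepted):
    client.delete_service_level_objective(
        request=monitoring_v3.DeleteServiceLevelObjectiveRequest(name=slo_name)
    )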
@@ -1601,14 +1624,9 @@ def __exit__(self, type, value, traceback): self.transport.close() -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-monitoring", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) __all__ = ("ServiceMonitoringServiceClient",) diff --git a/google/cloud/monitoring_v3/services/service_monitoring_service/transports/base.py b/google/cloud/monitoring_v3/services/service_monitoring_service/transports/base.py index 0c737414..dbc5ca5e 100644 --- a/google/cloud/monitoring_v3/services/service_monitoring_service/transports/base.py +++ b/google/cloud/monitoring_v3/services/service_monitoring_service/transports/base.py @@ -15,7 +15,8 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import pkg_resources + +from google.cloud.monitoring_v3 import gapic_version as package_version import google.auth # type: ignore import google.api_core @@ -30,14 +31,9 @@ from google.cloud.monitoring_v3.types import service_service from google.protobuf import empty_pb2 # type: ignore -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-monitoring", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) class ServiceMonitoringServiceTransport(abc.ABC): @@ -55,7 +51,7 @@ def __init__( self, *, host: str = DEFAULT_HOST, - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, diff --git a/google/cloud/monitoring_v3/services/service_monitoring_service/transports/grpc.py b/google/cloud/monitoring_v3/services/service_monitoring_service/transports/grpc.py index 127b8e03..d0fe7ae2 100644 --- a/google/cloud/monitoring_v3/services/service_monitoring_service/transports/grpc.py +++ b/google/cloud/monitoring_v3/services/service_monitoring_service/transports/grpc.py @@ -53,14 +53,14 @@ def __init__( self, *, host: str = "monitoring.googleapis.com", - credentials: ga_credentials.Credentials = None, - credentials_file: str = None, - scopes: Sequence[str] = None, - channel: grpc.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - ssl_channel_credentials: grpc.ChannelCredentials = None, - client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, @@ -187,8 +187,8 @@ def __init__( def create_channel( cls, host: str = "monitoring.googleapis.com", - credentials: 
ga_credentials.Credentials = None, - credentials_file: str = None, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, **kwargs, diff --git a/google/cloud/monitoring_v3/services/service_monitoring_service/transports/grpc_asyncio.py b/google/cloud/monitoring_v3/services/service_monitoring_service/transports/grpc_asyncio.py index 59321cff..b718fccc 100644 --- a/google/cloud/monitoring_v3/services/service_monitoring_service/transports/grpc_asyncio.py +++ b/google/cloud/monitoring_v3/services/service_monitoring_service/transports/grpc_asyncio.py @@ -55,7 +55,7 @@ class ServiceMonitoringServiceGrpcAsyncIOTransport(ServiceMonitoringServiceTrans def create_channel( cls, host: str = "monitoring.googleapis.com", - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -98,15 +98,15 @@ def __init__( self, *, host: str = "monitoring.googleapis.com", - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, - channel: aio.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - ssl_channel_credentials: grpc.ChannelCredentials = None, - client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, - quota_project_id=None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, api_audience: Optional[str] = None, diff --git a/google/cloud/monitoring_v3/services/uptime_check_service/async_client.py b/google/cloud/monitoring_v3/services/uptime_check_service/async_client.py index 1756f9c3..7f66bbe3 100644 --- a/google/cloud/monitoring_v3/services/uptime_check_service/async_client.py +++ b/google/cloud/monitoring_v3/services/uptime_check_service/async_client.py @@ -16,8 +16,19 @@ from collections import OrderedDict import functools import re -from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union -import pkg_resources +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.cloud.monitoring_v3 import gapic_version as package_version from google.api_core.client_options import ClientOptions from google.api_core import exceptions as core_exceptions @@ -174,9 +185,9 @@ def transport(self) -> UptimeCheckServiceTransport: def __init__( self, *, - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, transport: Union[str, UptimeCheckServiceTransport] = "grpc_asyncio", - client_options: ClientOptions = None, + client_options: Optional[ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiates the uptime check service client. 
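The async variants below keep the same optional request/timeout surface; a minimal usage sketch (project ID is a placeholder):

    import asyncio

    from google.cloud import monitoring_v3

    async def main() -> None:
        client = monitoring_v3.UptimeCheckServiceAsyncClient()
        # Omitting ``timeout`` leaves it at gapic_v1.method.DEFAULT, i.e. the
        # method's configured default; ``parent`` uses the flattened signature.
        pager = await client.list_uptime_check_configs(parent="projects/my-project")
        async for config in pager:
            print(config.name)

    asyncio.run(main())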
@@ -220,11 +231,13 @@ def __init__( async def list_uptime_check_configs( self, - request: Union[uptime_service.ListUptimeCheckConfigsRequest, dict] = None, + request: Optional[ + Union[uptime_service.ListUptimeCheckConfigsRequest, dict] + ] = None, *, - parent: str = None, + parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListUptimeCheckConfigsAsyncPager: r"""Lists the existing valid Uptime check configurations @@ -259,7 +272,7 @@ async def sample_list_uptime_check_configs(): print(response) Args: - request (Union[google.cloud.monitoring_v3.types.ListUptimeCheckConfigsRequest, dict]): + request (Optional[Union[google.cloud.monitoring_v3.types.ListUptimeCheckConfigsRequest, dict]]): The request object. The protocol for the `ListUptimeCheckConfigs` request. parent (:class:`str`): @@ -351,11 +364,13 @@ async def sample_list_uptime_check_configs(): async def get_uptime_check_config( self, - request: Union[uptime_service.GetUptimeCheckConfigRequest, dict] = None, + request: Optional[ + Union[uptime_service.GetUptimeCheckConfigRequest, dict] + ] = None, *, - name: str = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> uptime.UptimeCheckConfig: r"""Gets a single Uptime check configuration. @@ -387,7 +402,7 @@ async def sample_get_uptime_check_config(): print(response) Args: - request (Union[google.cloud.monitoring_v3.types.GetUptimeCheckConfigRequest, dict]): + request (Optional[Union[google.cloud.monitoring_v3.types.GetUptimeCheckConfigRequest, dict]]): The request object. The protocol for the `GetUptimeCheckConfig` request. name (:class:`str`): @@ -467,12 +482,14 @@ async def sample_get_uptime_check_config(): async def create_uptime_check_config( self, - request: Union[uptime_service.CreateUptimeCheckConfigRequest, dict] = None, + request: Optional[ + Union[uptime_service.CreateUptimeCheckConfigRequest, dict] + ] = None, *, - parent: str = None, - uptime_check_config: uptime.UptimeCheckConfig = None, + parent: Optional[str] = None, + uptime_check_config: Optional[uptime.UptimeCheckConfig] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> uptime.UptimeCheckConfig: r"""Creates a new Uptime check configuration. @@ -504,7 +521,7 @@ async def sample_create_uptime_check_config(): print(response) Args: - request (Union[google.cloud.monitoring_v3.types.CreateUptimeCheckConfigRequest, dict]): + request (Optional[Union[google.cloud.monitoring_v3.types.CreateUptimeCheckConfigRequest, dict]]): The request object. The protocol for the `CreateUptimeCheckConfig` request. 
parent (:class:`str`): @@ -585,11 +602,13 @@ async def sample_create_uptime_check_config(): async def update_uptime_check_config( self, - request: Union[uptime_service.UpdateUptimeCheckConfigRequest, dict] = None, + request: Optional[ + Union[uptime_service.UpdateUptimeCheckConfigRequest, dict] + ] = None, *, - uptime_check_config: uptime.UptimeCheckConfig = None, + uptime_check_config: Optional[uptime.UptimeCheckConfig] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> uptime.UptimeCheckConfig: r"""Updates an Uptime check configuration. You can either replace @@ -624,7 +643,7 @@ async def sample_update_uptime_check_config(): print(response) Args: - request (Union[google.cloud.monitoring_v3.types.UpdateUptimeCheckConfigRequest, dict]): + request (Optional[Union[google.cloud.monitoring_v3.types.UpdateUptimeCheckConfigRequest, dict]]): The request object. The protocol for the `UpdateUptimeCheckConfig` request. uptime_check_config (:class:`google.cloud.monitoring_v3.types.UptimeCheckConfig`): @@ -704,11 +723,13 @@ async def sample_update_uptime_check_config(): async def delete_uptime_check_config( self, - request: Union[uptime_service.DeleteUptimeCheckConfigRequest, dict] = None, + request: Optional[ + Union[uptime_service.DeleteUptimeCheckConfigRequest, dict] + ] = None, *, - name: str = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Deletes an Uptime check configuration. Note that this @@ -740,7 +761,7 @@ async def sample_delete_uptime_check_config(): await client.delete_uptime_check_config(request=request) Args: - request (Union[google.cloud.monitoring_v3.types.DeleteUptimeCheckConfigRequest, dict]): + request (Optional[Union[google.cloud.monitoring_v3.types.DeleteUptimeCheckConfigRequest, dict]]): The request object. The protocol for the `DeleteUptimeCheckConfig` request. name (:class:`str`): @@ -810,10 +831,10 @@ async def sample_delete_uptime_check_config(): async def list_uptime_check_ips( self, - request: Union[uptime_service.ListUptimeCheckIpsRequest, dict] = None, + request: Optional[Union[uptime_service.ListUptimeCheckIpsRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListUptimeCheckIpsAsyncPager: r"""Returns the list of IP addresses that checkers run @@ -846,7 +867,7 @@ async def sample_list_uptime_check_ips(): print(response) Args: - request (Union[google.cloud.monitoring_v3.types.ListUptimeCheckIpsRequest, dict]): + request (Optional[Union[google.cloud.monitoring_v3.types.ListUptimeCheckIpsRequest, dict]]): The request object. The protocol for the `ListUptimeCheckIps` request. 
retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -910,14 +931,9 @@ async def __aexit__(self, exc_type, exc, tb): await self.transport.close() -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-monitoring", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) __all__ = ("UptimeCheckServiceAsyncClient",) diff --git a/google/cloud/monitoring_v3/services/uptime_check_service/client.py b/google/cloud/monitoring_v3/services/uptime_check_service/client.py index 3fce2c03..9544d2a7 100644 --- a/google/cloud/monitoring_v3/services/uptime_check_service/client.py +++ b/google/cloud/monitoring_v3/services/uptime_check_service/client.py @@ -16,8 +16,20 @@ from collections import OrderedDict import os import re -from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union -import pkg_resources +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +from google.cloud.monitoring_v3 import gapic_version as package_version from google.api_core import client_options as client_options_lib from google.api_core import exceptions as core_exceptions @@ -60,7 +72,7 @@ class UptimeCheckServiceClientMeta(type): def get_transport_class( cls, - label: str = None, + label: Optional[str] = None, ) -> Type[UptimeCheckServiceTransport]: """Returns an appropriate transport class. @@ -341,8 +353,8 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, UptimeCheckServiceTransport, None] = None, - client_options: Optional[client_options_lib.ClientOptions] = None, + transport: Optional[Union[str, UptimeCheckServiceTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiates the uptime check service client. @@ -356,7 +368,7 @@ def __init__( transport (Union[str, UptimeCheckServiceTransport]): The transport to use. If set to None, a transport is chosen automatically. - client_options (google.api_core.client_options.ClientOptions): Custom options for the + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the client. It won't take effect if a ``transport`` instance is provided. (1) The ``api_endpoint`` property can be used to override the default endpoint provided by the client. 
GOOGLE_API_USE_MTLS_ENDPOINT @@ -386,6 +398,7 @@ def __init__( client_options = client_options_lib.from_dict(client_options) if client_options is None: client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( client_options @@ -438,11 +451,13 @@ def __init__( def list_uptime_check_configs( self, - request: Union[uptime_service.ListUptimeCheckConfigsRequest, dict] = None, + request: Optional[ + Union[uptime_service.ListUptimeCheckConfigsRequest, dict] + ] = None, *, - parent: str = None, + parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListUptimeCheckConfigsPager: r"""Lists the existing valid Uptime check configurations @@ -562,11 +577,13 @@ def sample_list_uptime_check_configs(): def get_uptime_check_config( self, - request: Union[uptime_service.GetUptimeCheckConfigRequest, dict] = None, + request: Optional[ + Union[uptime_service.GetUptimeCheckConfigRequest, dict] + ] = None, *, - name: str = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> uptime.UptimeCheckConfig: r"""Gets a single Uptime check configuration. @@ -669,12 +686,14 @@ def sample_get_uptime_check_config(): def create_uptime_check_config( self, - request: Union[uptime_service.CreateUptimeCheckConfigRequest, dict] = None, + request: Optional[ + Union[uptime_service.CreateUptimeCheckConfigRequest, dict] + ] = None, *, - parent: str = None, - uptime_check_config: uptime.UptimeCheckConfig = None, + parent: Optional[str] = None, + uptime_check_config: Optional[uptime.UptimeCheckConfig] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> uptime.UptimeCheckConfig: r"""Creates a new Uptime check configuration. @@ -789,11 +808,13 @@ def sample_create_uptime_check_config(): def update_uptime_check_config( self, - request: Union[uptime_service.UpdateUptimeCheckConfigRequest, dict] = None, + request: Optional[ + Union[uptime_service.UpdateUptimeCheckConfigRequest, dict] + ] = None, *, - uptime_check_config: uptime.UptimeCheckConfig = None, + uptime_check_config: Optional[uptime.UptimeCheckConfig] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> uptime.UptimeCheckConfig: r"""Updates an Uptime check configuration. You can either replace @@ -910,11 +931,13 @@ def sample_update_uptime_check_config(): def delete_uptime_check_config( self, - request: Union[uptime_service.DeleteUptimeCheckConfigRequest, dict] = None, + request: Optional[ + Union[uptime_service.DeleteUptimeCheckConfigRequest, dict] + ] = None, *, - name: str = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Deletes an Uptime check configuration. 
Note that this @@ -1009,10 +1032,10 @@ def sample_delete_uptime_check_config(): def list_uptime_check_ips( self, - request: Union[uptime_service.ListUptimeCheckIpsRequest, dict] = None, + request: Optional[Union[uptime_service.ListUptimeCheckIpsRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListUptimeCheckIpsPager: r"""Returns the list of IP addresses that checkers run @@ -1108,14 +1131,9 @@ def __exit__(self, type, value, traceback): self.transport.close() -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-monitoring", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) __all__ = ("UptimeCheckServiceClient",) diff --git a/google/cloud/monitoring_v3/services/uptime_check_service/transports/base.py b/google/cloud/monitoring_v3/services/uptime_check_service/transports/base.py index 38eb3e65..e9c2091d 100644 --- a/google/cloud/monitoring_v3/services/uptime_check_service/transports/base.py +++ b/google/cloud/monitoring_v3/services/uptime_check_service/transports/base.py @@ -15,7 +15,8 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import pkg_resources + +from google.cloud.monitoring_v3 import gapic_version as package_version import google.auth # type: ignore import google.api_core @@ -29,14 +30,9 @@ from google.cloud.monitoring_v3.types import uptime_service from google.protobuf import empty_pb2 # type: ignore -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-monitoring", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) class UptimeCheckServiceTransport(abc.ABC): @@ -54,7 +50,7 @@ def __init__( self, *, host: str = DEFAULT_HOST, - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, diff --git a/google/cloud/monitoring_v3/services/uptime_check_service/transports/grpc.py b/google/cloud/monitoring_v3/services/uptime_check_service/transports/grpc.py index a0535467..9806261b 100644 --- a/google/cloud/monitoring_v3/services/uptime_check_service/transports/grpc.py +++ b/google/cloud/monitoring_v3/services/uptime_check_service/transports/grpc.py @@ -56,14 +56,14 @@ def __init__( self, *, host: str = "monitoring.googleapis.com", - credentials: ga_credentials.Credentials = None, - credentials_file: str = None, - scopes: Sequence[str] = None, - channel: grpc.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - ssl_channel_credentials: grpc.ChannelCredentials = None, - client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + 
client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, @@ -190,8 +190,8 @@ def __init__( def create_channel( cls, host: str = "monitoring.googleapis.com", - credentials: ga_credentials.Credentials = None, - credentials_file: str = None, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, **kwargs, diff --git a/google/cloud/monitoring_v3/services/uptime_check_service/transports/grpc_asyncio.py b/google/cloud/monitoring_v3/services/uptime_check_service/transports/grpc_asyncio.py index 41ffee13..cd8f3182 100644 --- a/google/cloud/monitoring_v3/services/uptime_check_service/transports/grpc_asyncio.py +++ b/google/cloud/monitoring_v3/services/uptime_check_service/transports/grpc_asyncio.py @@ -58,7 +58,7 @@ class UptimeCheckServiceGrpcAsyncIOTransport(UptimeCheckServiceTransport): def create_channel( cls, host: str = "monitoring.googleapis.com", - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -101,15 +101,15 @@ def __init__( self, *, host: str = "monitoring.googleapis.com", - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, - channel: aio.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - ssl_channel_credentials: grpc.ChannelCredentials = None, - client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, - quota_project_id=None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, api_audience: Optional[str] = None, diff --git a/google/cloud/monitoring_v3/types/alert.py b/google/cloud/monitoring_v3/types/alert.py index ec0e04cf..531ff9a5 100644 --- a/google/cloud/monitoring_v3/types/alert.py +++ b/google/cloud/monitoring_v3/types/alert.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from typing import MutableMapping, MutableSequence + import proto # type: ignore from google.cloud.monitoring_v3.types import common @@ -67,7 +69,7 @@ class AlertPolicy(proto.Message): underlying problems detected by the alerting policy. Notification channels that have limited capacity might not show this documentation. - user_labels (Mapping[str, str]): + user_labels (MutableMapping[str, str]): User-supplied key/value data to be used for organizing and identifying the ``AlertPolicy`` objects. @@ -76,7 +78,7 @@ class AlertPolicy(proto.Message): is smaller. 
Labels and values can contain only lowercase letters, numerals, underscores, and dashes. Keys must begin with a letter. - conditions (Sequence[google.cloud.monitoring_v3.types.AlertPolicy.Condition]): + conditions (MutableSequence[google.cloud.monitoring_v3.types.AlertPolicy.Condition]): A list of conditions for the policy. The conditions are combined by AND or OR according to the ``combiner`` field. If the combined conditions evaluate to true, then an @@ -102,7 +104,7 @@ class AlertPolicy(proto.Message): is invalid. OK if the alert policy is valid. If not OK, the alert policy will not generate incidents. - notification_channels (Sequence[str]): + notification_channels (MutableSequence[str]): Identifies the notification channels to which notifications should be sent when incidents are opened or closed or when new violations occur on an already opened incident. Each @@ -156,11 +158,11 @@ class Documentation(proto.Message): more information. """ - content = proto.Field( + content: str = proto.Field( proto.STRING, number=1, ) - mime_type = proto.Field( + mime_type: str = proto.Field( proto.STRING, number=2, ) @@ -272,12 +274,12 @@ class Trigger(proto.Message): This field is a member of `oneof`_ ``type``. """ - count = proto.Field( + count: int = proto.Field( proto.INT32, number=1, oneof="type", ) - percent = proto.Field( + percent: float = proto.Field( proto.DOUBLE, number=2, oneof="type", @@ -302,7 +304,7 @@ class MetricThreshold(proto.Message): type and the resource type. Optionally, it can specify resource labels and metric labels. This field must not exceed 2048 Unicode characters in length. - aggregations (Sequence[google.cloud.monitoring_v3.types.Aggregation]): + aggregations (MutableSequence[google.cloud.monitoring_v3.types.Aggregation]): Specifies the alignment of data points in individual time series as well as how to combine the retrieved time series together (such as when aggregating multiple streams on each @@ -328,7 +330,7 @@ class MetricThreshold(proto.Message): contain restrictions on resource type, resource labels, and metric labels. This field may not exceed 2048 Unicode characters in length. - denominator_aggregations (Sequence[google.cloud.monitoring_v3.types.Aggregation]): + denominator_aggregations (MutableSequence[google.cloud.monitoring_v3.types.Aggregation]): Specifies the alignment of data points in individual time series selected by ``denominatorFilter`` as well as how to combine the retrieved time series together (such as when @@ -378,47 +380,51 @@ class MetricThreshold(proto.Message): data stops arriving. 
""" - filter = proto.Field( + filter: str = proto.Field( proto.STRING, number=2, ) - aggregations = proto.RepeatedField( + aggregations: MutableSequence[common.Aggregation] = proto.RepeatedField( proto.MESSAGE, number=8, message=common.Aggregation, ) - denominator_filter = proto.Field( + denominator_filter: str = proto.Field( proto.STRING, number=9, ) - denominator_aggregations = proto.RepeatedField( + denominator_aggregations: MutableSequence[ + common.Aggregation + ] = proto.RepeatedField( proto.MESSAGE, number=10, message=common.Aggregation, ) - comparison = proto.Field( + comparison: common.ComparisonType = proto.Field( proto.ENUM, number=4, enum=common.ComparisonType, ) - threshold_value = proto.Field( + threshold_value: float = proto.Field( proto.DOUBLE, number=5, ) - duration = proto.Field( + duration: duration_pb2.Duration = proto.Field( proto.MESSAGE, number=6, message=duration_pb2.Duration, ) - trigger = proto.Field( + trigger: "AlertPolicy.Condition.Trigger" = proto.Field( proto.MESSAGE, number=7, message="AlertPolicy.Condition.Trigger", ) - evaluation_missing_data = proto.Field( - proto.ENUM, - number=11, - enum="AlertPolicy.Condition.EvaluationMissingData", + evaluation_missing_data: "AlertPolicy.Condition.EvaluationMissingData" = ( + proto.Field( + proto.ENUM, + number=11, + enum="AlertPolicy.Condition.EvaluationMissingData", + ) ) class MetricAbsence(proto.Message): @@ -443,7 +449,7 @@ class MetricAbsence(proto.Message): type and the resource type. Optionally, it can specify resource labels and metric labels. This field must not exceed 2048 Unicode characters in length. - aggregations (Sequence[google.cloud.monitoring_v3.types.Aggregation]): + aggregations (MutableSequence[google.cloud.monitoring_v3.types.Aggregation]): Specifies the alignment of data points in individual time series as well as how to combine the retrieved time series together (such as when aggregating multiple streams on each @@ -471,21 +477,21 @@ class MetricAbsence(proto.Message): identified by ``filter`` and ``aggregations``. """ - filter = proto.Field( + filter: str = proto.Field( proto.STRING, number=1, ) - aggregations = proto.RepeatedField( + aggregations: MutableSequence[common.Aggregation] = proto.RepeatedField( proto.MESSAGE, number=5, message=common.Aggregation, ) - duration = proto.Field( + duration: duration_pb2.Duration = proto.Field( proto.MESSAGE, number=2, message=duration_pb2.Duration, ) - trigger = proto.Field( + trigger: "AlertPolicy.Condition.Trigger" = proto.Field( proto.MESSAGE, number=3, message="AlertPolicy.Condition.Trigger", @@ -502,7 +508,7 @@ class LogMatch(proto.Message): Required. A logs-based filter. See `Advanced Logs Queries `__ for how this filter should be constructed. - label_extractors (Mapping[str, str]): + label_extractors (MutableMapping[str, str]): Optional. A map from a label key to an extractor expression, which is used to extract the value for this label key. Each entry in this map is a specification for how data should be @@ -517,11 +523,11 @@ class LogMatch(proto.Message): for syntax and examples. """ - filter = proto.Field( + filter: str = proto.Field( proto.STRING, number=1, ) - label_extractors = proto.MapField( + label_extractors: MutableMapping[str, str] = proto.MapField( proto.STRING, proto.STRING, number=2, @@ -563,53 +569,55 @@ class MonitoringQueryLanguageCondition(proto.Message): data stops arriving. 
""" - query = proto.Field( + query: str = proto.Field( proto.STRING, number=1, ) - duration = proto.Field( + duration: duration_pb2.Duration = proto.Field( proto.MESSAGE, number=2, message=duration_pb2.Duration, ) - trigger = proto.Field( + trigger: "AlertPolicy.Condition.Trigger" = proto.Field( proto.MESSAGE, number=3, message="AlertPolicy.Condition.Trigger", ) - evaluation_missing_data = proto.Field( - proto.ENUM, - number=4, - enum="AlertPolicy.Condition.EvaluationMissingData", + evaluation_missing_data: "AlertPolicy.Condition.EvaluationMissingData" = ( + proto.Field( + proto.ENUM, + number=4, + enum="AlertPolicy.Condition.EvaluationMissingData", + ) ) - name = proto.Field( + name: str = proto.Field( proto.STRING, number=12, ) - display_name = proto.Field( + display_name: str = proto.Field( proto.STRING, number=6, ) - condition_threshold = proto.Field( + condition_threshold: "AlertPolicy.Condition.MetricThreshold" = proto.Field( proto.MESSAGE, number=1, oneof="condition", message="AlertPolicy.Condition.MetricThreshold", ) - condition_absent = proto.Field( + condition_absent: "AlertPolicy.Condition.MetricAbsence" = proto.Field( proto.MESSAGE, number=2, oneof="condition", message="AlertPolicy.Condition.MetricAbsence", ) - condition_matched_log = proto.Field( + condition_matched_log: "AlertPolicy.Condition.LogMatch" = proto.Field( proto.MESSAGE, number=20, oneof="condition", message="AlertPolicy.Condition.LogMatch", ) - condition_monitoring_query_language = proto.Field( + condition_monitoring_query_language: "AlertPolicy.Condition.MonitoringQueryLanguageCondition" = proto.Field( proto.MESSAGE, number=19, oneof="condition", @@ -641,76 +649,78 @@ class NotificationRateLimit(proto.Message): Not more than one notification per ``period``. """ - period = proto.Field( + period: duration_pb2.Duration = proto.Field( proto.MESSAGE, number=1, message=duration_pb2.Duration, ) - notification_rate_limit = proto.Field( - proto.MESSAGE, - number=1, - message="AlertPolicy.AlertStrategy.NotificationRateLimit", + notification_rate_limit: "AlertPolicy.AlertStrategy.NotificationRateLimit" = ( + proto.Field( + proto.MESSAGE, + number=1, + message="AlertPolicy.AlertStrategy.NotificationRateLimit", + ) ) - auto_close = proto.Field( + auto_close: duration_pb2.Duration = proto.Field( proto.MESSAGE, number=3, message=duration_pb2.Duration, ) - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) - display_name = proto.Field( + display_name: str = proto.Field( proto.STRING, number=2, ) - documentation = proto.Field( + documentation: Documentation = proto.Field( proto.MESSAGE, number=13, message=Documentation, ) - user_labels = proto.MapField( + user_labels: MutableMapping[str, str] = proto.MapField( proto.STRING, proto.STRING, number=16, ) - conditions = proto.RepeatedField( + conditions: MutableSequence[Condition] = proto.RepeatedField( proto.MESSAGE, number=12, message=Condition, ) - combiner = proto.Field( + combiner: ConditionCombinerType = proto.Field( proto.ENUM, number=6, enum=ConditionCombinerType, ) - enabled = proto.Field( + enabled: wrappers_pb2.BoolValue = proto.Field( proto.MESSAGE, number=17, message=wrappers_pb2.BoolValue, ) - validity = proto.Field( + validity: status_pb2.Status = proto.Field( proto.MESSAGE, number=18, message=status_pb2.Status, ) - notification_channels = proto.RepeatedField( + notification_channels: MutableSequence[str] = proto.RepeatedField( proto.STRING, number=14, ) - creation_record = proto.Field( + creation_record: gm_mutation_record.MutationRecord = 
proto.Field( proto.MESSAGE, number=10, message=gm_mutation_record.MutationRecord, ) - mutation_record = proto.Field( + mutation_record: gm_mutation_record.MutationRecord = proto.Field( proto.MESSAGE, number=11, message=gm_mutation_record.MutationRecord, ) - alert_strategy = proto.Field( + alert_strategy: AlertStrategy = proto.Field( proto.MESSAGE, number=21, message=AlertStrategy, diff --git a/google/cloud/monitoring_v3/types/alert_service.py b/google/cloud/monitoring_v3/types/alert_service.py index 7cb0b0a3..bc92e77e 100644 --- a/google/cloud/monitoring_v3/types/alert_service.py +++ b/google/cloud/monitoring_v3/types/alert_service.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from typing import MutableMapping, MutableSequence + import proto # type: ignore from google.cloud.monitoring_v3.types import alert @@ -59,11 +61,11 @@ class CreateAlertPolicyRequest(proto.Message): the new policy, including a new ``[ALERT_POLICY_ID]`` value. """ - name = proto.Field( + name: str = proto.Field( proto.STRING, number=3, ) - alert_policy = proto.Field( + alert_policy: alert.AlertPolicy = proto.Field( proto.MESSAGE, number=2, message=alert.AlertPolicy, @@ -82,7 +84,7 @@ class GetAlertPolicyRequest(proto.Message): projects/[PROJECT_ID_OR_NUMBER]/alertPolicies/[ALERT_POLICY_ID] """ - name = proto.Field( + name: str = proto.Field( proto.STRING, number=3, ) @@ -130,23 +132,23 @@ class ListAlertPoliciesRequest(proto.Message): results from the previous method call. """ - name = proto.Field( + name: str = proto.Field( proto.STRING, number=4, ) - filter = proto.Field( + filter: str = proto.Field( proto.STRING, number=5, ) - order_by = proto.Field( + order_by: str = proto.Field( proto.STRING, number=6, ) - page_size = proto.Field( + page_size: int = proto.Field( proto.INT32, number=2, ) - page_token = proto.Field( + page_token: str = proto.Field( proto.STRING, number=3, ) @@ -156,7 +158,7 @@ class ListAlertPoliciesResponse(proto.Message): r"""The protocol for the ``ListAlertPolicies`` response. Attributes: - alert_policies (Sequence[google.cloud.monitoring_v3.types.AlertPolicy]): + alert_policies (MutableSequence[google.cloud.monitoring_v3.types.AlertPolicy]): The returned alert policies. next_page_token (str): If there might be more results than were returned, then this @@ -173,16 +175,16 @@ class ListAlertPoliciesResponse(proto.Message): def raw_page(self): return self - alert_policies = proto.RepeatedField( + alert_policies: MutableSequence[alert.AlertPolicy] = proto.RepeatedField( proto.MESSAGE, number=3, message=alert.AlertPolicy, ) - next_page_token = proto.Field( + next_page_token: str = proto.Field( proto.STRING, number=2, ) - total_size = proto.Field( + total_size: int = proto.Field( proto.INT32, number=4, ) @@ -227,12 +229,12 @@ class UpdateAlertPolicyRequest(proto.Message): ``update_mask`` are ignored. """ - update_mask = proto.Field( + update_mask: field_mask_pb2.FieldMask = proto.Field( proto.MESSAGE, number=2, message=field_mask_pb2.FieldMask, ) - alert_policy = proto.Field( + alert_policy: alert.AlertPolicy = proto.Field( proto.MESSAGE, number=3, message=alert.AlertPolicy, @@ -254,7 +256,7 @@ class DeleteAlertPolicyRequest(proto.Message): [AlertPolicy][google.monitoring.v3.AlertPolicy]. 
""" - name = proto.Field( + name: str = proto.Field( proto.STRING, number=3, ) diff --git a/google/cloud/monitoring_v3/types/common.py b/google/cloud/monitoring_v3/types/common.py index 5a021a76..7d45d3a9 100644 --- a/google/cloud/monitoring_v3/types/common.py +++ b/google/cloud/monitoring_v3/types/common.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from typing import MutableMapping, MutableSequence + import proto # type: ignore from google.api import distribution_pb2 # type: ignore @@ -93,27 +95,27 @@ class TypedValue(proto.Message): This field is a member of `oneof`_ ``value``. """ - bool_value = proto.Field( + bool_value: bool = proto.Field( proto.BOOL, number=1, oneof="value", ) - int64_value = proto.Field( + int64_value: int = proto.Field( proto.INT64, number=2, oneof="value", ) - double_value = proto.Field( + double_value: float = proto.Field( proto.DOUBLE, number=3, oneof="value", ) - string_value = proto.Field( + string_value: str = proto.Field( proto.STRING, number=4, oneof="value", ) - distribution_value = proto.Field( + distribution_value: distribution_pb2.Distribution = proto.Field( proto.MESSAGE, number=5, oneof="value", @@ -169,12 +171,12 @@ class TimeInterval(proto.Message): end time. """ - end_time = proto.Field( + end_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=2, message=timestamp_pb2.Timestamp, ) - start_time = proto.Field( + start_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=1, message=timestamp_pb2.Timestamp, @@ -266,7 +268,7 @@ class Aggregation(proto.Message): then ``per_series_aligner`` must be specified, and must not be ``ALIGN_NONE``. An ``alignment_period`` must also be specified; otherwise, an error is returned. - group_by_fields (Sequence[str]): + group_by_fields (MutableSequence[str]): The set of fields to preserve when ``cross_series_reducer`` is specified. The ``group_by_fields`` determine how the time series are partitioned into subsets prior to applying the @@ -340,22 +342,22 @@ class Reducer(proto.Enum): REDUCE_PERCENTILE_50 = 11 REDUCE_PERCENTILE_05 = 12 - alignment_period = proto.Field( + alignment_period: duration_pb2.Duration = proto.Field( proto.MESSAGE, number=1, message=duration_pb2.Duration, ) - per_series_aligner = proto.Field( + per_series_aligner: Aligner = proto.Field( proto.ENUM, number=2, enum=Aligner, ) - cross_series_reducer = proto.Field( + cross_series_reducer: Reducer = proto.Field( proto.ENUM, number=4, enum=Reducer, ) - group_by_fields = proto.RepeatedField( + group_by_fields: MutableSequence[str] = proto.RepeatedField( proto.STRING, number=5, ) diff --git a/google/cloud/monitoring_v3/types/dropped_labels.py b/google/cloud/monitoring_v3/types/dropped_labels.py index b8b6cbce..8c35da2d 100644 --- a/google/cloud/monitoring_v3/types/dropped_labels.py +++ b/google/cloud/monitoring_v3/types/dropped_labels.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from typing import MutableMapping, MutableSequence + import proto # type: ignore @@ -44,12 +46,12 @@ class DroppedLabels(proto.Message): clients to resolve any ambiguities. Attributes: - label (Mapping[str, str]): + label (MutableMapping[str, str]): Map from label to its value, for all labels dropped in any aggregation. 
""" - label = proto.MapField( + label: MutableMapping[str, str] = proto.MapField( proto.STRING, proto.STRING, number=1, diff --git a/google/cloud/monitoring_v3/types/group.py b/google/cloud/monitoring_v3/types/group.py index bde854f2..27136380 100644 --- a/google/cloud/monitoring_v3/types/group.py +++ b/google/cloud/monitoring_v3/types/group.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from typing import MutableMapping, MutableSequence + import proto # type: ignore @@ -87,23 +89,23 @@ class Group(proto.Message): clusters. """ - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) - display_name = proto.Field( + display_name: str = proto.Field( proto.STRING, number=2, ) - parent_name = proto.Field( + parent_name: str = proto.Field( proto.STRING, number=3, ) - filter = proto.Field( + filter: str = proto.Field( proto.STRING, number=5, ) - is_cluster = proto.Field( + is_cluster: bool = proto.Field( proto.BOOL, number=6, ) diff --git a/google/cloud/monitoring_v3/types/group_service.py b/google/cloud/monitoring_v3/types/group_service.py index bd0231c5..c7551099 100644 --- a/google/cloud/monitoring_v3/types/group_service.py +++ b/google/cloud/monitoring_v3/types/group_service.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from typing import MutableMapping, MutableSequence + import proto # type: ignore from google.api import monitored_resource_pb2 # type: ignore @@ -103,30 +105,30 @@ class ListGroupsRequest(proto.Message): additional results from the previous method call. """ - name = proto.Field( + name: str = proto.Field( proto.STRING, number=7, ) - children_of_group = proto.Field( + children_of_group: str = proto.Field( proto.STRING, number=2, oneof="filter", ) - ancestors_of_group = proto.Field( + ancestors_of_group: str = proto.Field( proto.STRING, number=3, oneof="filter", ) - descendants_of_group = proto.Field( + descendants_of_group: str = proto.Field( proto.STRING, number=4, oneof="filter", ) - page_size = proto.Field( + page_size: int = proto.Field( proto.INT32, number=5, ) - page_token = proto.Field( + page_token: str = proto.Field( proto.STRING, number=6, ) @@ -136,7 +138,7 @@ class ListGroupsResponse(proto.Message): r"""The ``ListGroups`` response. Attributes: - group (Sequence[google.cloud.monitoring_v3.types.Group]): + group (MutableSequence[google.cloud.monitoring_v3.types.Group]): The groups that match the specified filters. next_page_token (str): If there are more results than have been returned, then this @@ -149,12 +151,12 @@ class ListGroupsResponse(proto.Message): def raw_page(self): return self - group = proto.RepeatedField( + group: MutableSequence[gm_group.Group] = proto.RepeatedField( proto.MESSAGE, number=1, message=gm_group.Group, ) - next_page_token = proto.Field( + next_page_token: str = proto.Field( proto.STRING, number=2, ) @@ -172,7 +174,7 @@ class GetGroupRequest(proto.Message): projects/[PROJECT_ID_OR_NUMBER]/groups/[GROUP_ID] """ - name = proto.Field( + name: str = proto.Field( proto.STRING, number=3, ) @@ -198,16 +200,16 @@ class CreateGroupRequest(proto.Message): create the group. 
""" - name = proto.Field( + name: str = proto.Field( proto.STRING, number=4, ) - group = proto.Field( + group: gm_group.Group = proto.Field( proto.MESSAGE, number=2, message=gm_group.Group, ) - validate_only = proto.Field( + validate_only: bool = proto.Field( proto.BOOL, number=3, ) @@ -226,12 +228,12 @@ class UpdateGroupRequest(proto.Message): update the existing group. """ - group = proto.Field( + group: gm_group.Group = proto.Field( proto.MESSAGE, number=2, message=gm_group.Group, ) - validate_only = proto.Field( + validate_only: bool = proto.Field( proto.BOOL, number=3, ) @@ -256,11 +258,11 @@ class DeleteGroupRequest(proto.Message): value is false. """ - name = proto.Field( + name: str = proto.Field( proto.STRING, number=3, ) - recursive = proto.Field( + recursive: bool = proto.Field( proto.BOOL, number=4, ) @@ -305,23 +307,23 @@ class ListGroupMembersRequest(proto.Message): minute is returned. """ - name = proto.Field( + name: str = proto.Field( proto.STRING, number=7, ) - page_size = proto.Field( + page_size: int = proto.Field( proto.INT32, number=3, ) - page_token = proto.Field( + page_token: str = proto.Field( proto.STRING, number=4, ) - filter = proto.Field( + filter: str = proto.Field( proto.STRING, number=5, ) - interval = proto.Field( + interval: common.TimeInterval = proto.Field( proto.MESSAGE, number=6, message=common.TimeInterval, @@ -332,7 +334,7 @@ class ListGroupMembersResponse(proto.Message): r"""The ``ListGroupMembers`` response. Attributes: - members (Sequence[google.api.monitored_resource_pb2.MonitoredResource]): + members (MutableSequence[google.api.monitored_resource_pb2.MonitoredResource]): A set of monitored resources in the group. next_page_token (str): If there are more results than have been returned, then this @@ -348,16 +350,18 @@ class ListGroupMembersResponse(proto.Message): def raw_page(self): return self - members = proto.RepeatedField( + members: MutableSequence[ + monitored_resource_pb2.MonitoredResource + ] = proto.RepeatedField( proto.MESSAGE, number=1, message=monitored_resource_pb2.MonitoredResource, ) - next_page_token = proto.Field( + next_page_token: str = proto.Field( proto.STRING, number=2, ) - total_size = proto.Field( + total_size: int = proto.Field( proto.INT32, number=3, ) diff --git a/google/cloud/monitoring_v3/types/metric.py b/google/cloud/monitoring_v3/types/metric.py index a99581b0..50690db7 100644 --- a/google/cloud/monitoring_v3/types/metric.py +++ b/google/cloud/monitoring_v3/types/metric.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from typing import MutableMapping, MutableSequence + import proto # type: ignore from google.api import label_pb2 # type: ignore @@ -55,12 +57,12 @@ class Point(proto.Message): The value of the data point. """ - interval = proto.Field( + interval: common.TimeInterval = proto.Field( proto.MESSAGE, number=1, message=common.TimeInterval, ) - value = proto.Field( + value: common.TypedValue = proto.Field( proto.MESSAGE, number=2, message=common.TypedValue, @@ -111,7 +113,7 @@ class TimeSeries(proto.Message): When creating a time series, this field is optional. If present, it must be the same as the type of the data in the ``points`` field. - points (Sequence[google.cloud.monitoring_v3.types.Point]): + points (MutableSequence[google.cloud.monitoring_v3.types.Point]): The data points of this time series. When listing time series, points are returned in reverse time order. 
@@ -128,37 +130,37 @@ class TimeSeries(proto.Message): of the stored metric values. """ - metric = proto.Field( + metric: metric_pb2.Metric = proto.Field( proto.MESSAGE, number=1, message=metric_pb2.Metric, ) - resource = proto.Field( + resource: monitored_resource_pb2.MonitoredResource = proto.Field( proto.MESSAGE, number=2, message=monitored_resource_pb2.MonitoredResource, ) - metadata = proto.Field( + metadata: monitored_resource_pb2.MonitoredResourceMetadata = proto.Field( proto.MESSAGE, number=7, message=monitored_resource_pb2.MonitoredResourceMetadata, ) - metric_kind = proto.Field( + metric_kind: metric_pb2.MetricDescriptor.MetricKind = proto.Field( proto.ENUM, number=3, enum=metric_pb2.MetricDescriptor.MetricKind, ) - value_type = proto.Field( + value_type: metric_pb2.MetricDescriptor.ValueType = proto.Field( proto.ENUM, number=4, enum=metric_pb2.MetricDescriptor.ValueType, ) - points = proto.RepeatedField( + points: MutableSequence["Point"] = proto.RepeatedField( proto.MESSAGE, number=5, message="Point", ) - unit = proto.Field( + unit: str = proto.Field( proto.STRING, number=8, ) @@ -168,9 +170,9 @@ class TimeSeriesDescriptor(proto.Message): r"""A descriptor for the labels and points in a time series. Attributes: - label_descriptors (Sequence[google.api.label_pb2.LabelDescriptor]): + label_descriptors (MutableSequence[google.api.label_pb2.LabelDescriptor]): Descriptors for the labels. - point_descriptors (Sequence[google.cloud.monitoring_v3.types.TimeSeriesDescriptor.ValueDescriptor]): + point_descriptors (MutableSequence[google.cloud.monitoring_v3.types.TimeSeriesDescriptor.ValueDescriptor]): Descriptors for the point data value columns. """ @@ -191,31 +193,31 @@ class ValueDescriptor(proto.Message): if ``value_type`` is INTEGER, DOUBLE, DISTRIBUTION. """ - key = proto.Field( + key: str = proto.Field( proto.STRING, number=1, ) - value_type = proto.Field( + value_type: metric_pb2.MetricDescriptor.ValueType = proto.Field( proto.ENUM, number=2, enum=metric_pb2.MetricDescriptor.ValueType, ) - metric_kind = proto.Field( + metric_kind: metric_pb2.MetricDescriptor.MetricKind = proto.Field( proto.ENUM, number=3, enum=metric_pb2.MetricDescriptor.MetricKind, ) - unit = proto.Field( + unit: str = proto.Field( proto.STRING, number=4, ) - label_descriptors = proto.RepeatedField( + label_descriptors: MutableSequence[label_pb2.LabelDescriptor] = proto.RepeatedField( proto.MESSAGE, number=1, message=label_pb2.LabelDescriptor, ) - point_descriptors = proto.RepeatedField( + point_descriptors: MutableSequence[ValueDescriptor] = proto.RepeatedField( proto.MESSAGE, number=5, message=ValueDescriptor, @@ -227,13 +229,13 @@ class TimeSeriesData(proto.Message): TimeSeriesDescriptor. Attributes: - label_values (Sequence[google.cloud.monitoring_v3.types.LabelValue]): + label_values (MutableSequence[google.cloud.monitoring_v3.types.LabelValue]): The values of the labels in the time series identifier, given in the same order as the ``label_descriptors`` field of the TimeSeriesDescriptor associated with this object. Each value must have a value of the type given in the corresponding entry of ``label_descriptors``. - point_data (Sequence[google.cloud.monitoring_v3.types.TimeSeriesData.PointData]): + point_data (MutableSequence[google.cloud.monitoring_v3.types.TimeSeriesData.PointData]): The points in the time series. """ @@ -244,29 +246,29 @@ class PointData(proto.Message): with this object. 
Attributes: - values (Sequence[google.cloud.monitoring_v3.types.TypedValue]): + values (MutableSequence[google.cloud.monitoring_v3.types.TypedValue]): The values that make up the point. time_interval (google.cloud.monitoring_v3.types.TimeInterval): The time interval associated with the point. """ - values = proto.RepeatedField( + values: MutableSequence[common.TypedValue] = proto.RepeatedField( proto.MESSAGE, number=1, message=common.TypedValue, ) - time_interval = proto.Field( + time_interval: common.TimeInterval = proto.Field( proto.MESSAGE, number=2, message=common.TimeInterval, ) - label_values = proto.RepeatedField( + label_values: MutableSequence["LabelValue"] = proto.RepeatedField( proto.MESSAGE, number=1, message="LabelValue", ) - point_data = proto.RepeatedField( + point_data: MutableSequence[PointData] = proto.RepeatedField( proto.MESSAGE, number=2, message=PointData, @@ -298,17 +300,17 @@ class LabelValue(proto.Message): This field is a member of `oneof`_ ``value``. """ - bool_value = proto.Field( + bool_value: bool = proto.Field( proto.BOOL, number=1, oneof="value", ) - int64_value = proto.Field( + int64_value: int = proto.Field( proto.INT64, number=2, oneof="value", ) - string_value = proto.Field( + string_value: str = proto.Field( proto.STRING, number=3, oneof="value", @@ -327,12 +329,12 @@ class QueryError(proto.Message): The error message. """ - locator = proto.Field( + locator: "TextLocator" = proto.Field( proto.MESSAGE, number=1, message="TextLocator", ) - message = proto.Field( + message: str = proto.Field( proto.STRING, number=2, ) @@ -398,35 +400,35 @@ class Position(proto.Message): index even though the text is UTF-8. """ - line = proto.Field( + line: int = proto.Field( proto.INT32, number=1, ) - column = proto.Field( + column: int = proto.Field( proto.INT32, number=2, ) - source = proto.Field( + source: str = proto.Field( proto.STRING, number=1, ) - start_position = proto.Field( + start_position: Position = proto.Field( proto.MESSAGE, number=2, message=Position, ) - end_position = proto.Field( + end_position: Position = proto.Field( proto.MESSAGE, number=3, message=Position, ) - nested_locator = proto.Field( + nested_locator: "TextLocator" = proto.Field( proto.MESSAGE, number=4, message="TextLocator", ) - nesting_reason = proto.Field( + nesting_reason: str = proto.Field( proto.STRING, number=5, ) diff --git a/google/cloud/monitoring_v3/types/metric_service.py b/google/cloud/monitoring_v3/types/metric_service.py index 6c642bfd..ba934d72 100644 --- a/google/cloud/monitoring_v3/types/metric_service.py +++ b/google/cloud/monitoring_v3/types/metric_service.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from typing import MutableMapping, MutableSequence + import proto # type: ignore from google.api import metric_pb2 # type: ignore @@ -78,19 +80,19 @@ class ListMonitoredResourceDescriptorsRequest(proto.Message): additional results from the previous method call. """ - name = proto.Field( + name: str = proto.Field( proto.STRING, number=5, ) - filter = proto.Field( + filter: str = proto.Field( proto.STRING, number=2, ) - page_size = proto.Field( + page_size: int = proto.Field( proto.INT32, number=3, ) - page_token = proto.Field( + page_token: str = proto.Field( proto.STRING, number=4, ) @@ -100,7 +102,7 @@ class ListMonitoredResourceDescriptorsResponse(proto.Message): r"""The ``ListMonitoredResourceDescriptors`` response. 
Attributes: - resource_descriptors (Sequence[google.api.monitored_resource_pb2.MonitoredResourceDescriptor]): + resource_descriptors (MutableSequence[google.api.monitored_resource_pb2.MonitoredResourceDescriptor]): The monitored resource descriptors that are available to this project and that match ``filter``, if present. next_page_token (str): @@ -114,12 +116,14 @@ class ListMonitoredResourceDescriptorsResponse(proto.Message): def raw_page(self): return self - resource_descriptors = proto.RepeatedField( + resource_descriptors: MutableSequence[ + monitored_resource_pb2.MonitoredResourceDescriptor + ] = proto.RepeatedField( proto.MESSAGE, number=1, message=monitored_resource_pb2.MonitoredResourceDescriptor, ) - next_page_token = proto.Field( + next_page_token: str = proto.Field( proto.STRING, number=2, ) @@ -141,7 +145,7 @@ class GetMonitoredResourceDescriptorRequest(proto.Message): ``cloudsql_database``. """ - name = proto.Field( + name: str = proto.Field( proto.STRING, number=3, ) @@ -180,19 +184,19 @@ class ListMetricDescriptorsRequest(proto.Message): additional results from the previous method call. """ - name = proto.Field( + name: str = proto.Field( proto.STRING, number=5, ) - filter = proto.Field( + filter: str = proto.Field( proto.STRING, number=2, ) - page_size = proto.Field( + page_size: int = proto.Field( proto.INT32, number=3, ) - page_token = proto.Field( + page_token: str = proto.Field( proto.STRING, number=4, ) @@ -202,7 +206,7 @@ class ListMetricDescriptorsResponse(proto.Message): r"""The ``ListMetricDescriptors`` response. Attributes: - metric_descriptors (Sequence[google.api.metric_pb2.MetricDescriptor]): + metric_descriptors (MutableSequence[google.api.metric_pb2.MetricDescriptor]): The metric descriptors that are available to the project and that match the value of ``filter``, if present. next_page_token (str): @@ -216,12 +220,14 @@ class ListMetricDescriptorsResponse(proto.Message): def raw_page(self): return self - metric_descriptors = proto.RepeatedField( + metric_descriptors: MutableSequence[ + metric_pb2.MetricDescriptor + ] = proto.RepeatedField( proto.MESSAGE, number=1, message=metric_pb2.MetricDescriptor, ) - next_page_token = proto.Field( + next_page_token: str = proto.Field( proto.STRING, number=2, ) @@ -243,7 +249,7 @@ class GetMetricDescriptorRequest(proto.Message): ``"compute.googleapis.com/instance/disk/read_bytes_count"``. """ - name = proto.Field( + name: str = proto.Field( proto.STRING, number=3, ) @@ -264,11 +270,11 @@ class CreateMetricDescriptorRequest(proto.Message): descriptor. """ - name = proto.Field( + name: str = proto.Field( proto.STRING, number=3, ) - metric_descriptor = proto.Field( + metric_descriptor: metric_pb2.MetricDescriptor = proto.Field( proto.MESSAGE, number=2, message=metric_pb2.MetricDescriptor, @@ -291,7 +297,7 @@ class DeleteMetricDescriptorRequest(proto.Message): ``"custom.googleapis.com/my_test_metric"``. 
""" - name = proto.Field( + name: str = proto.Field( proto.STRING, number=3, ) @@ -365,43 +371,43 @@ class TimeSeriesView(proto.Enum): FULL = 0 HEADERS = 1 - name = proto.Field( + name: str = proto.Field( proto.STRING, number=10, ) - filter = proto.Field( + filter: str = proto.Field( proto.STRING, number=2, ) - interval = proto.Field( + interval: common.TimeInterval = proto.Field( proto.MESSAGE, number=4, message=common.TimeInterval, ) - aggregation = proto.Field( + aggregation: common.Aggregation = proto.Field( proto.MESSAGE, number=5, message=common.Aggregation, ) - secondary_aggregation = proto.Field( + secondary_aggregation: common.Aggregation = proto.Field( proto.MESSAGE, number=11, message=common.Aggregation, ) - order_by = proto.Field( + order_by: str = proto.Field( proto.STRING, number=6, ) - view = proto.Field( + view: TimeSeriesView = proto.Field( proto.ENUM, number=7, enum=TimeSeriesView, ) - page_size = proto.Field( + page_size: int = proto.Field( proto.INT32, number=8, ) - page_token = proto.Field( + page_token: str = proto.Field( proto.STRING, number=9, ) @@ -411,7 +417,7 @@ class ListTimeSeriesResponse(proto.Message): r"""The ``ListTimeSeries`` response. Attributes: - time_series (Sequence[google.cloud.monitoring_v3.types.TimeSeries]): + time_series (MutableSequence[google.cloud.monitoring_v3.types.TimeSeries]): One or more time series that match the filter included in the request. next_page_token (str): @@ -419,7 +425,7 @@ class ListTimeSeriesResponse(proto.Message): field is set to a non-empty value. To see the additional results, use that value as ``page_token`` in the next call to this method. - execution_errors (Sequence[google.rpc.status_pb2.Status]): + execution_errors (MutableSequence[google.rpc.status_pb2.Status]): Query execution errors that may have caused the time series data returned to be incomplete. unit (str): @@ -435,21 +441,21 @@ class ListTimeSeriesResponse(proto.Message): def raw_page(self): return self - time_series = proto.RepeatedField( + time_series: MutableSequence[gm_metric.TimeSeries] = proto.RepeatedField( proto.MESSAGE, number=1, message=gm_metric.TimeSeries, ) - next_page_token = proto.Field( + next_page_token: str = proto.Field( proto.STRING, number=2, ) - execution_errors = proto.RepeatedField( + execution_errors: MutableSequence[status_pb2.Status] = proto.RepeatedField( proto.MESSAGE, number=3, message=status_pb2.Status, ) - unit = proto.Field( + unit: str = proto.Field( proto.STRING, number=5, ) @@ -467,7 +473,7 @@ class CreateTimeSeriesRequest(proto.Message): :: projects/[PROJECT_ID_OR_NUMBER] - time_series (Sequence[google.cloud.monitoring_v3.types.TimeSeries]): + time_series (MutableSequence[google.cloud.monitoring_v3.types.TimeSeries]): Required. The new data to be added to a list of time series. Adds at most one data point to each of several time series. The new data point must be more recent than any other point @@ -479,11 +485,11 @@ class CreateTimeSeriesRequest(proto.Message): request is 200. """ - name = proto.Field( + name: str = proto.Field( proto.STRING, number=3, ) - time_series = proto.RepeatedField( + time_series: MutableSequence[gm_metric.TimeSeries] = proto.RepeatedField( proto.MESSAGE, number=2, message=gm_metric.TimeSeries, @@ -502,12 +508,12 @@ class CreateTimeSeriesError(proto.Message): ``time_series``. 
""" - time_series = proto.Field( + time_series: gm_metric.TimeSeries = proto.Field( proto.MESSAGE, number=1, message=gm_metric.TimeSeries, ) - status = proto.Field( + status: status_pb2.Status = proto.Field( proto.MESSAGE, number=2, message=status_pb2.Status, @@ -524,7 +530,7 @@ class CreateTimeSeriesSummary(proto.Message): success_point_count (int): The number of points that were successfully written. - errors (Sequence[google.cloud.monitoring_v3.types.CreateTimeSeriesSummary.Error]): + errors (MutableSequence[google.cloud.monitoring_v3.types.CreateTimeSeriesSummary.Error]): The number of points that failed to be written. Order is not guaranteed. """ @@ -540,25 +546,25 @@ class Error(proto.Message): ``status``. """ - status = proto.Field( + status: status_pb2.Status = proto.Field( proto.MESSAGE, number=1, message=status_pb2.Status, ) - point_count = proto.Field( + point_count: int = proto.Field( proto.INT32, number=2, ) - total_point_count = proto.Field( + total_point_count: int = proto.Field( proto.INT32, number=1, ) - success_point_count = proto.Field( + success_point_count: int = proto.Field( proto.INT32, number=2, ) - errors = proto.RepeatedField( + errors: MutableSequence[Error] = proto.RepeatedField( proto.MESSAGE, number=3, message=Error, @@ -591,19 +597,19 @@ class QueryTimeSeriesRequest(proto.Message): additional results from the previous method call. """ - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) - query = proto.Field( + query: str = proto.Field( proto.STRING, number=7, ) - page_size = proto.Field( + page_size: int = proto.Field( proto.INT32, number=9, ) - page_token = proto.Field( + page_token: str = proto.Field( proto.STRING, number=10, ) @@ -615,14 +621,14 @@ class QueryTimeSeriesResponse(proto.Message): Attributes: time_series_descriptor (google.cloud.monitoring_v3.types.TimeSeriesDescriptor): The descriptor for the time series data. - time_series_data (Sequence[google.cloud.monitoring_v3.types.TimeSeriesData]): + time_series_data (MutableSequence[google.cloud.monitoring_v3.types.TimeSeriesData]): The time series data. next_page_token (str): If there are more results than have been returned, then this field is set to a non-empty value. To see the additional results, use that value as ``page_token`` in the next call to this method. - partial_errors (Sequence[google.rpc.status_pb2.Status]): + partial_errors (MutableSequence[google.rpc.status_pb2.Status]): Query execution errors that may have caused the time series data returned to be incomplete. The available data will be available in the @@ -633,21 +639,21 @@ class QueryTimeSeriesResponse(proto.Message): def raw_page(self): return self - time_series_descriptor = proto.Field( + time_series_descriptor: gm_metric.TimeSeriesDescriptor = proto.Field( proto.MESSAGE, number=8, message=gm_metric.TimeSeriesDescriptor, ) - time_series_data = proto.RepeatedField( + time_series_data: MutableSequence[gm_metric.TimeSeriesData] = proto.RepeatedField( proto.MESSAGE, number=9, message=gm_metric.TimeSeriesData, ) - next_page_token = proto.Field( + next_page_token: str = proto.Field( proto.STRING, number=10, ) - partial_errors = proto.RepeatedField( + partial_errors: MutableSequence[status_pb2.Status] = proto.RepeatedField( proto.MESSAGE, number=11, message=status_pb2.Status, @@ -659,7 +665,7 @@ class QueryErrorList(proto.Message): errors. 
Attributes: - errors (Sequence[google.cloud.monitoring_v3.types.QueryError]): + errors (MutableSequence[google.cloud.monitoring_v3.types.QueryError]): Errors in parsing the time series query language text. The number of errors in the response may be limited. @@ -667,12 +673,12 @@ class QueryErrorList(proto.Message): A summary of all the errors. """ - errors = proto.RepeatedField( + errors: MutableSequence[gm_metric.QueryError] = proto.RepeatedField( proto.MESSAGE, number=1, message=gm_metric.QueryError, ) - error_summary = proto.Field( + error_summary: str = proto.Field( proto.STRING, number=2, ) diff --git a/google/cloud/monitoring_v3/types/mutation_record.py b/google/cloud/monitoring_v3/types/mutation_record.py index 3fc2842c..1a56ac68 100644 --- a/google/cloud/monitoring_v3/types/mutation_record.py +++ b/google/cloud/monitoring_v3/types/mutation_record.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from typing import MutableMapping, MutableSequence + import proto # type: ignore from google.protobuf import timestamp_pb2 # type: ignore @@ -37,12 +39,12 @@ class MutationRecord(proto.Message): change. """ - mutate_time = proto.Field( + mutate_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=1, message=timestamp_pb2.Timestamp, ) - mutated_by = proto.Field( + mutated_by: str = proto.Field( proto.STRING, number=2, ) diff --git a/google/cloud/monitoring_v3/types/notification.py b/google/cloud/monitoring_v3/types/notification.py index 184cb6a8..c845dd8c 100644 --- a/google/cloud/monitoring_v3/types/notification.py +++ b/google/cloud/monitoring_v3/types/notification.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from typing import MutableMapping, MutableSequence + import proto # type: ignore from google.api import label_pb2 # type: ignore @@ -61,13 +63,13 @@ class NotificationChannelDescriptor(proto.Message): notification channel type. The description may include a description of the properties of the channel and pointers to external documentation. - labels (Sequence[google.api.label_pb2.LabelDescriptor]): + labels (MutableSequence[google.api.label_pb2.LabelDescriptor]): The set of labels that must be defined to identify a particular channel of the corresponding type. Each label includes a description for how that field should be populated. - supported_tiers (Sequence[google.cloud.monitoring_v3.types.ServiceTier]): + supported_tiers (MutableSequence[google.cloud.monitoring_v3.types.ServiceTier]): The tiers that support this notification channel; the project service tier must be one of the supported_tiers. launch_stage (google.api.launch_stage_pb2.LaunchStage): @@ -75,33 +77,33 @@ class NotificationChannelDescriptor(proto.Message): type. 
""" - name = proto.Field( + name: str = proto.Field( proto.STRING, number=6, ) - type_ = proto.Field( + type_: str = proto.Field( proto.STRING, number=1, ) - display_name = proto.Field( + display_name: str = proto.Field( proto.STRING, number=2, ) - description = proto.Field( + description: str = proto.Field( proto.STRING, number=3, ) - labels = proto.RepeatedField( + labels: MutableSequence[label_pb2.LabelDescriptor] = proto.RepeatedField( proto.MESSAGE, number=4, message=label_pb2.LabelDescriptor, ) - supported_tiers = proto.RepeatedField( + supported_tiers: MutableSequence[common.ServiceTier] = proto.RepeatedField( proto.ENUM, number=5, enum=common.ServiceTier, ) - launch_stage = proto.Field( + launch_stage: launch_stage_pb2.LaunchStage = proto.Field( proto.ENUM, number=7, enum=launch_stage_pb2.LaunchStage, @@ -144,14 +146,14 @@ class NotificationChannel(proto.Message): provide additional details, beyond the display name, for the channel. This may not exceed 1024 Unicode characters. - labels (Mapping[str, str]): + labels (MutableMapping[str, str]): Configuration fields that define the channel and its behavior. The permissible and required labels are specified in the [NotificationChannelDescriptor.labels][google.monitoring.v3.NotificationChannelDescriptor.labels] of the ``NotificationChannelDescriptor`` corresponding to the ``type`` field. - user_labels (Mapping[str, str]): + user_labels (MutableMapping[str, str]): User-supplied key/value data that does not need to conform to the corresponding ``NotificationChannelDescriptor``'s schema, unlike the ``labels`` field. This field is intended @@ -199,7 +201,7 @@ class NotificationChannel(proto.Message): future. creation_record (google.cloud.monitoring_v3.types.MutationRecord): Record of the creation of this channel. - mutation_records (Sequence[google.cloud.monitoring_v3.types.MutationRecord]): + mutation_records (MutableSequence[google.cloud.monitoring_v3.types.MutationRecord]): Records of the modification of this channel. 
""" @@ -215,48 +217,50 @@ class VerificationStatus(proto.Enum): UNVERIFIED = 1 VERIFIED = 2 - type_ = proto.Field( + type_: str = proto.Field( proto.STRING, number=1, ) - name = proto.Field( + name: str = proto.Field( proto.STRING, number=6, ) - display_name = proto.Field( + display_name: str = proto.Field( proto.STRING, number=3, ) - description = proto.Field( + description: str = proto.Field( proto.STRING, number=4, ) - labels = proto.MapField( + labels: MutableMapping[str, str] = proto.MapField( proto.STRING, proto.STRING, number=5, ) - user_labels = proto.MapField( + user_labels: MutableMapping[str, str] = proto.MapField( proto.STRING, proto.STRING, number=8, ) - verification_status = proto.Field( + verification_status: VerificationStatus = proto.Field( proto.ENUM, number=9, enum=VerificationStatus, ) - enabled = proto.Field( + enabled: wrappers_pb2.BoolValue = proto.Field( proto.MESSAGE, number=11, message=wrappers_pb2.BoolValue, ) - creation_record = proto.Field( + creation_record: mutation_record.MutationRecord = proto.Field( proto.MESSAGE, number=12, message=mutation_record.MutationRecord, ) - mutation_records = proto.RepeatedField( + mutation_records: MutableSequence[ + mutation_record.MutationRecord + ] = proto.RepeatedField( proto.MESSAGE, number=13, message=mutation_record.MutationRecord, diff --git a/google/cloud/monitoring_v3/types/notification_service.py b/google/cloud/monitoring_v3/types/notification_service.py index b05c3767..29cc64a1 100644 --- a/google/cloud/monitoring_v3/types/notification_service.py +++ b/google/cloud/monitoring_v3/types/notification_service.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from typing import MutableMapping, MutableSequence + import proto # type: ignore from google.cloud.monitoring_v3.types import notification @@ -70,15 +72,15 @@ class ListNotificationChannelDescriptorsRequest(proto.Message): the next set of results. """ - name = proto.Field( + name: str = proto.Field( proto.STRING, number=4, ) - page_size = proto.Field( + page_size: int = proto.Field( proto.INT32, number=2, ) - page_token = proto.Field( + page_token: str = proto.Field( proto.STRING, number=3, ) @@ -88,7 +90,7 @@ class ListNotificationChannelDescriptorsResponse(proto.Message): r"""The ``ListNotificationChannelDescriptors`` response. Attributes: - channel_descriptors (Sequence[google.cloud.monitoring_v3.types.NotificationChannelDescriptor]): + channel_descriptors (MutableSequence[google.cloud.monitoring_v3.types.NotificationChannelDescriptor]): The monitored resource descriptors supported for the specified project, optionally filtered. next_page_token (str): @@ -102,12 +104,14 @@ class ListNotificationChannelDescriptorsResponse(proto.Message): def raw_page(self): return self - channel_descriptors = proto.RepeatedField( + channel_descriptors: MutableSequence[ + notification.NotificationChannelDescriptor + ] = proto.RepeatedField( proto.MESSAGE, number=1, message=notification.NotificationChannelDescriptor, ) - next_page_token = proto.Field( + next_page_token: str = proto.Field( proto.STRING, number=2, ) @@ -126,7 +130,7 @@ class GetNotificationChannelDescriptorRequest(proto.Message): projects/[PROJECT_ID_OR_NUMBER]/notificationChannelDescriptors/[CHANNEL_TYPE] """ - name = proto.Field( + name: str = proto.Field( proto.STRING, number=3, ) @@ -156,11 +160,11 @@ class CreateNotificationChannelRequest(proto.Message): create. 
""" - name = proto.Field( + name: str = proto.Field( proto.STRING, number=3, ) - notification_channel = proto.Field( + notification_channel: notification.NotificationChannel = proto.Field( proto.MESSAGE, number=2, message=notification.NotificationChannel, @@ -210,23 +214,23 @@ class ListNotificationChannelsRequest(proto.Message): the next set of results. """ - name = proto.Field( + name: str = proto.Field( proto.STRING, number=5, ) - filter = proto.Field( + filter: str = proto.Field( proto.STRING, number=6, ) - order_by = proto.Field( + order_by: str = proto.Field( proto.STRING, number=7, ) - page_size = proto.Field( + page_size: int = proto.Field( proto.INT32, number=3, ) - page_token = proto.Field( + page_token: str = proto.Field( proto.STRING, number=4, ) @@ -236,7 +240,7 @@ class ListNotificationChannelsResponse(proto.Message): r"""The ``ListNotificationChannels`` response. Attributes: - notification_channels (Sequence[google.cloud.monitoring_v3.types.NotificationChannel]): + notification_channels (MutableSequence[google.cloud.monitoring_v3.types.NotificationChannel]): The notification channels defined for the specified project. next_page_token (str): @@ -255,16 +259,18 @@ class ListNotificationChannelsResponse(proto.Message): def raw_page(self): return self - notification_channels = proto.RepeatedField( + notification_channels: MutableSequence[ + notification.NotificationChannel + ] = proto.RepeatedField( proto.MESSAGE, number=3, message=notification.NotificationChannel, ) - next_page_token = proto.Field( + next_page_token: str = proto.Field( proto.STRING, number=2, ) - total_size = proto.Field( + total_size: int = proto.Field( proto.INT32, number=4, ) @@ -283,7 +289,7 @@ class GetNotificationChannelRequest(proto.Message): projects/[PROJECT_ID_OR_NUMBER]/notificationChannels/[CHANNEL_ID] """ - name = proto.Field( + name: str = proto.Field( proto.STRING, number=3, ) @@ -302,12 +308,12 @@ class UpdateNotificationChannelRequest(proto.Message): fields should also be included in the ``update_mask``. """ - update_mask = proto.Field( + update_mask: field_mask_pb2.FieldMask = proto.Field( proto.MESSAGE, number=2, message=field_mask_pb2.FieldMask, ) - notification_channel = proto.Field( + notification_channel: notification.NotificationChannel = proto.Field( proto.MESSAGE, number=3, message=notification.NotificationChannel, @@ -334,11 +340,11 @@ class DeleteNotificationChannelRequest(proto.Message): fail to be deleted in a delete operation. """ - name = proto.Field( + name: str = proto.Field( proto.STRING, number=3, ) - force = proto.Field( + force: bool = proto.Field( proto.BOOL, number=5, ) @@ -353,7 +359,7 @@ class SendNotificationChannelVerificationCodeRequest(proto.Message): to send a verification code. """ - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) @@ -386,11 +392,11 @@ class GetNotificationChannelVerificationCodeRequest(proto.Message): that is permitted). """ - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) - expire_time = proto.Field( + expire_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=2, message=timestamp_pb2.Timestamp, @@ -416,11 +422,11 @@ class GetNotificationChannelVerificationCodeResponse(proto.Message): permitted expiration. 
""" - code = proto.Field( + code: str = proto.Field( proto.STRING, number=1, ) - expire_time = proto.Field( + expire_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=2, message=timestamp_pb2.Timestamp, @@ -445,11 +451,11 @@ class VerifyNotificationChannelRequest(proto.Message): structure or format of the code). """ - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) - code = proto.Field( + code: str = proto.Field( proto.STRING, number=2, ) diff --git a/google/cloud/monitoring_v3/types/service.py b/google/cloud/monitoring_v3/types/service.py index f0f10b36..07a0be7c 100644 --- a/google/cloud/monitoring_v3/types/service.py +++ b/google/cloud/monitoring_v3/types/service.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from typing import MutableMapping, MutableSequence + import proto # type: ignore from google.protobuf import duration_pb2 # type: ignore @@ -90,7 +92,7 @@ class Service(proto.Message): telemetry (google.cloud.monitoring_v3.types.Service.Telemetry): Configuration for how to query telemetry on a Service. - user_labels (Mapping[str, str]): + user_labels (MutableMapping[str, str]): Labels which have been used to annotate the service. Label keys must start with a letter. Label keys and values may contain lowercase @@ -121,7 +123,7 @@ class AppEngine(proto.Message): https://cloud.google.com/monitoring/api/resources#tag_gae_app """ - module_id = proto.Field( + module_id: str = proto.Field( proto.STRING, number=1, ) @@ -138,7 +140,7 @@ class CloudEndpoints(proto.Message): https://cloud.google.com/monitoring/api/resources#tag_api """ - service = proto.Field( + service: str = proto.Field( proto.STRING, number=1, ) @@ -167,19 +169,19 @@ class ClusterIstio(proto.Message): in Istio metrics. """ - location = proto.Field( + location: str = proto.Field( proto.STRING, number=1, ) - cluster_name = proto.Field( + cluster_name: str = proto.Field( proto.STRING, number=2, ) - service_namespace = proto.Field( + service_namespace: str = proto.Field( proto.STRING, number=3, ) - service_name = proto.Field( + service_name: str = proto.Field( proto.STRING, number=4, ) @@ -204,15 +206,15 @@ class MeshIstio(proto.Message): in Istio metrics. """ - mesh_uid = proto.Field( + mesh_uid: str = proto.Field( proto.STRING, number=1, ) - service_namespace = proto.Field( + service_namespace: str = proto.Field( proto.STRING, number=3, ) - service_name = proto.Field( + service_name: str = proto.Field( proto.STRING, number=4, ) @@ -241,15 +243,15 @@ class IstioCanonicalService(proto.Message): metrics `__. """ - mesh_uid = proto.Field( + mesh_uid: str = proto.Field( proto.STRING, number=1, ) - canonical_service_namespace = proto.Field( + canonical_service_namespace: str = proto.Field( proto.STRING, number=3, ) - canonical_service = proto.Field( + canonical_service: str = proto.Field( proto.STRING, number=4, ) @@ -264,61 +266,61 @@ class Telemetry(proto.Message): https://cloud.google.com/apis/design/resource_names. 
""" - resource_name = proto.Field( + resource_name: str = proto.Field( proto.STRING, number=1, ) - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) - display_name = proto.Field( + display_name: str = proto.Field( proto.STRING, number=2, ) - custom = proto.Field( + custom: Custom = proto.Field( proto.MESSAGE, number=6, oneof="identifier", message=Custom, ) - app_engine = proto.Field( + app_engine: AppEngine = proto.Field( proto.MESSAGE, number=7, oneof="identifier", message=AppEngine, ) - cloud_endpoints = proto.Field( + cloud_endpoints: CloudEndpoints = proto.Field( proto.MESSAGE, number=8, oneof="identifier", message=CloudEndpoints, ) - cluster_istio = proto.Field( + cluster_istio: ClusterIstio = proto.Field( proto.MESSAGE, number=9, oneof="identifier", message=ClusterIstio, ) - mesh_istio = proto.Field( + mesh_istio: MeshIstio = proto.Field( proto.MESSAGE, number=10, oneof="identifier", message=MeshIstio, ) - istio_canonical_service = proto.Field( + istio_canonical_service: IstioCanonicalService = proto.Field( proto.MESSAGE, number=11, oneof="identifier", message=IstioCanonicalService, ) - telemetry = proto.Field( + telemetry: Telemetry = proto.Field( proto.MESSAGE, number=13, message=Telemetry, ) - user_labels = proto.MapField( + user_labels: MutableMapping[str, str] = proto.MapField( proto.STRING, proto.STRING, number=14, @@ -371,7 +373,7 @@ class ServiceLevelObjective(proto.Message): ``WEEK``, ``FORTNIGHT``, and ``MONTH`` are supported. This field is a member of `oneof`_ ``period``. - user_labels (Mapping[str, str]): + user_labels (MutableMapping[str, str]): Labels which have been used to annotate the service-level objective. Label keys must start with a letter. Label keys and values may contain @@ -394,36 +396,36 @@ class View(proto.Enum): FULL = 2 EXPLICIT = 1 - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) - display_name = proto.Field( + display_name: str = proto.Field( proto.STRING, number=11, ) - service_level_indicator = proto.Field( + service_level_indicator: "ServiceLevelIndicator" = proto.Field( proto.MESSAGE, number=3, message="ServiceLevelIndicator", ) - goal = proto.Field( + goal: float = proto.Field( proto.DOUBLE, number=4, ) - rolling_period = proto.Field( + rolling_period: duration_pb2.Duration = proto.Field( proto.MESSAGE, number=5, oneof="period", message=duration_pb2.Duration, ) - calendar_period = proto.Field( + calendar_period: calendar_period_pb2.CalendarPeriod = proto.Field( proto.ENUM, number=6, oneof="period", enum=calendar_period_pb2.CalendarPeriod, ) - user_labels = proto.MapField( + user_labels: MutableMapping[str, str] = proto.MapField( proto.STRING, proto.STRING, number=12, @@ -469,19 +471,19 @@ class ServiceLevelIndicator(proto.Message): This field is a member of `oneof`_ ``type``. """ - basic_sli = proto.Field( + basic_sli: "BasicSli" = proto.Field( proto.MESSAGE, number=4, oneof="type", message="BasicSli", ) - request_based = proto.Field( + request_based: "RequestBasedSli" = proto.Field( proto.MESSAGE, number=1, oneof="type", message="RequestBasedSli", ) - windows_based = proto.Field( + windows_based: "WindowsBasedSli" = proto.Field( proto.MESSAGE, number=2, oneof="type", @@ -505,7 +507,7 @@ class BasicSli(proto.Message): .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields Attributes: - method (Sequence[str]): + method (MutableSequence[str]): OPTIONAL: The set of RPCs to which this SLI is relevant. 
Telemetry from other methods will not be used to calculate performance for this @@ -513,7 +515,7 @@ class BasicSli(proto.Message): Service's methods. For service types that don't support breaking down by method, setting this field will result in an error. - location (Sequence[str]): + location (MutableSequence[str]): OPTIONAL: The set of locations to which this SLI is relevant. Telemetry from other locations will not be used to calculate performance for @@ -522,7 +524,7 @@ class BasicSli(proto.Message): service types that don't support breaking down by location, setting this field will result in an error. - version (Sequence[str]): + version (MutableSequence[str]): OPTIONAL: The set of API versions to which this SLI is relevant. Telemetry from other API versions will not be used to calculate @@ -556,31 +558,31 @@ class LatencyCriteria(proto.Message): this service that return in no more than ``threshold``. """ - threshold = proto.Field( + threshold: duration_pb2.Duration = proto.Field( proto.MESSAGE, number=3, message=duration_pb2.Duration, ) - method = proto.RepeatedField( + method: MutableSequence[str] = proto.RepeatedField( proto.STRING, number=7, ) - location = proto.RepeatedField( + location: MutableSequence[str] = proto.RepeatedField( proto.STRING, number=8, ) - version = proto.RepeatedField( + version: MutableSequence[str] = proto.RepeatedField( proto.STRING, number=9, ) - availability = proto.Field( + availability: AvailabilityCriteria = proto.Field( proto.MESSAGE, number=2, oneof="sli_criteria", message=AvailabilityCriteria, ) - latency = proto.Field( + latency: LatencyCriteria = proto.Field( proto.MESSAGE, number=3, oneof="sli_criteria", @@ -598,11 +600,11 @@ class Range(proto.Message): Range maximum. """ - min_ = proto.Field( + min_: float = proto.Field( proto.DOUBLE, number=1, ) - max_ = proto.Field( + max_: float = proto.Field( proto.DOUBLE, number=2, ) @@ -635,13 +637,13 @@ class RequestBasedSli(proto.Message): This field is a member of `oneof`_ ``method``. """ - good_total_ratio = proto.Field( + good_total_ratio: "TimeSeriesRatio" = proto.Field( proto.MESSAGE, number=1, oneof="method", message="TimeSeriesRatio", ) - distribution_cut = proto.Field( + distribution_cut: "DistributionCut" = proto.Field( proto.MESSAGE, number=3, oneof="method", @@ -683,15 +685,15 @@ class TimeSeriesRatio(proto.Message): or ``MetricKind = CUMULATIVE``. """ - good_service_filter = proto.Field( + good_service_filter: str = proto.Field( proto.STRING, number=4, ) - bad_service_filter = proto.Field( + bad_service_filter: str = proto.Field( proto.STRING, number=5, ) - total_service_filter = proto.Field( + total_service_filter: str = proto.Field( proto.STRING, number=6, ) @@ -718,11 +720,11 @@ class DistributionCut(proto.Message): value. """ - distribution_filter = proto.Field( + distribution_filter: str = proto.Field( proto.STRING, number=4, ) - range_ = proto.Field( + range_: "Range" = proto.Field( proto.MESSAGE, number=5, message="Range", @@ -794,19 +796,19 @@ class PerformanceThreshold(proto.Message): counted as good. """ - performance = proto.Field( + performance: "RequestBasedSli" = proto.Field( proto.MESSAGE, number=1, oneof="type", message="RequestBasedSli", ) - basic_sli_performance = proto.Field( + basic_sli_performance: "BasicSli" = proto.Field( proto.MESSAGE, number=3, oneof="type", message="BasicSli", ) - threshold = proto.Field( + threshold: float = proto.Field( proto.DOUBLE, number=2, ) @@ -830,40 +832,40 @@ class MetricRange(proto.Message): value. 
""" - time_series = proto.Field( + time_series: str = proto.Field( proto.STRING, number=1, ) - range_ = proto.Field( + range_: "Range" = proto.Field( proto.MESSAGE, number=4, message="Range", ) - good_bad_metric_filter = proto.Field( + good_bad_metric_filter: str = proto.Field( proto.STRING, number=5, oneof="window_criterion", ) - good_total_ratio_threshold = proto.Field( + good_total_ratio_threshold: PerformanceThreshold = proto.Field( proto.MESSAGE, number=2, oneof="window_criterion", message=PerformanceThreshold, ) - metric_mean_in_range = proto.Field( + metric_mean_in_range: MetricRange = proto.Field( proto.MESSAGE, number=6, oneof="window_criterion", message=MetricRange, ) - metric_sum_in_range = proto.Field( + metric_sum_in_range: MetricRange = proto.Field( proto.MESSAGE, number=7, oneof="window_criterion", message=MetricRange, ) - window_period = proto.Field( + window_period: duration_pb2.Duration = proto.Field( proto.MESSAGE, number=4, message=duration_pb2.Duration, diff --git a/google/cloud/monitoring_v3/types/service_service.py b/google/cloud/monitoring_v3/types/service_service.py index 01a26ba5..b86a0cfe 100644 --- a/google/cloud/monitoring_v3/types/service_service.py +++ b/google/cloud/monitoring_v3/types/service_service.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from typing import MutableMapping, MutableSequence + import proto # type: ignore from google.cloud.monitoring_v3.types import service as gm_service @@ -58,15 +60,15 @@ class CreateServiceRequest(proto.Message): Required. The ``Service`` to create. """ - parent = proto.Field( + parent: str = proto.Field( proto.STRING, number=1, ) - service_id = proto.Field( + service_id: str = proto.Field( proto.STRING, number=3, ) - service = proto.Field( + service: gm_service.Service = proto.Field( proto.MESSAGE, number=2, message=gm_service.Service, @@ -85,7 +87,7 @@ class GetServiceRequest(proto.Message): projects/[PROJECT_ID_OR_NUMBER]/services/[SERVICE_ID] """ - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) @@ -140,19 +142,19 @@ class ListServicesRequest(proto.Message): additional results from the previous method call. """ - parent = proto.Field( + parent: str = proto.Field( proto.STRING, number=1, ) - filter = proto.Field( + filter: str = proto.Field( proto.STRING, number=2, ) - page_size = proto.Field( + page_size: int = proto.Field( proto.INT32, number=3, ) - page_token = proto.Field( + page_token: str = proto.Field( proto.STRING, number=4, ) @@ -162,7 +164,7 @@ class ListServicesResponse(proto.Message): r"""The ``ListServices`` response. Attributes: - services (Sequence[google.cloud.monitoring_v3.types.Service]): + services (MutableSequence[google.cloud.monitoring_v3.types.Service]): The ``Service``\ s matching the specified filter. next_page_token (str): If there are more results than have been returned, then this @@ -175,12 +177,12 @@ class ListServicesResponse(proto.Message): def raw_page(self): return self - services = proto.RepeatedField( + services: MutableSequence[gm_service.Service] = proto.RepeatedField( proto.MESSAGE, number=1, message=gm_service.Service, ) - next_page_token = proto.Field( + next_page_token: str = proto.Field( proto.STRING, number=2, ) @@ -198,12 +200,12 @@ class UpdateServiceRequest(proto.Message): use for the update. 
""" - service = proto.Field( + service: gm_service.Service = proto.Field( proto.MESSAGE, number=1, message=gm_service.Service, ) - update_mask = proto.Field( + update_mask: field_mask_pb2.FieldMask = proto.Field( proto.MESSAGE, number=2, message=field_mask_pb2.FieldMask, @@ -223,7 +225,7 @@ class DeleteServiceRequest(proto.Message): projects/[PROJECT_ID_OR_NUMBER]/services/[SERVICE_ID] """ - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) @@ -250,15 +252,15 @@ class CreateServiceLevelObjectiveRequest(proto.Message): ``ServiceLevelObjective`` exists with this name. """ - parent = proto.Field( + parent: str = proto.Field( proto.STRING, number=1, ) - service_level_objective_id = proto.Field( + service_level_objective_id: str = proto.Field( proto.STRING, number=3, ) - service_level_objective = proto.Field( + service_level_objective: gm_service.ServiceLevelObjective = proto.Field( proto.MESSAGE, number=2, message=gm_service.ServiceLevelObjective, @@ -285,11 +287,11 @@ class GetServiceLevelObjectiveRequest(proto.Message): ``RequestBasedSli`` spelling out how the SLI is computed. """ - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) - view = proto.Field( + view: gm_service.ServiceLevelObjective.View = proto.Field( proto.ENUM, number=2, enum=gm_service.ServiceLevelObjective.View, @@ -330,23 +332,23 @@ class ListServiceLevelObjectivesRequest(proto.Message): ``RequestBasedSli`` spelling out how the SLI is computed. """ - parent = proto.Field( + parent: str = proto.Field( proto.STRING, number=1, ) - filter = proto.Field( + filter: str = proto.Field( proto.STRING, number=2, ) - page_size = proto.Field( + page_size: int = proto.Field( proto.INT32, number=3, ) - page_token = proto.Field( + page_token: str = proto.Field( proto.STRING, number=4, ) - view = proto.Field( + view: gm_service.ServiceLevelObjective.View = proto.Field( proto.ENUM, number=5, enum=gm_service.ServiceLevelObjective.View, @@ -357,7 +359,7 @@ class ListServiceLevelObjectivesResponse(proto.Message): r"""The ``ListServiceLevelObjectives`` response. Attributes: - service_level_objectives (Sequence[google.cloud.monitoring_v3.types.ServiceLevelObjective]): + service_level_objectives (MutableSequence[google.cloud.monitoring_v3.types.ServiceLevelObjective]): The ``ServiceLevelObjective``\ s matching the specified filter. next_page_token (str): @@ -371,12 +373,14 @@ class ListServiceLevelObjectivesResponse(proto.Message): def raw_page(self): return self - service_level_objectives = proto.RepeatedField( + service_level_objectives: MutableSequence[ + gm_service.ServiceLevelObjective + ] = proto.RepeatedField( proto.MESSAGE, number=1, message=gm_service.ServiceLevelObjective, ) - next_page_token = proto.Field( + next_page_token: str = proto.Field( proto.STRING, number=2, ) @@ -394,12 +398,12 @@ class UpdateServiceLevelObjectiveRequest(proto.Message): use for the update. 
""" - service_level_objective = proto.Field( + service_level_objective: gm_service.ServiceLevelObjective = proto.Field( proto.MESSAGE, number=1, message=gm_service.ServiceLevelObjective, ) - update_mask = proto.Field( + update_mask: field_mask_pb2.FieldMask = proto.Field( proto.MESSAGE, number=2, message=field_mask_pb2.FieldMask, @@ -419,7 +423,7 @@ class DeleteServiceLevelObjectiveRequest(proto.Message): projects/[PROJECT_ID_OR_NUMBER]/services/[SERVICE_ID]/serviceLevelObjectives/[SLO_NAME] """ - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) diff --git a/google/cloud/monitoring_v3/types/span_context.py b/google/cloud/monitoring_v3/types/span_context.py index f2e50f05..173bbf0f 100644 --- a/google/cloud/monitoring_v3/types/span_context.py +++ b/google/cloud/monitoring_v3/types/span_context.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from typing import MutableMapping, MutableSequence + import proto # type: ignore @@ -52,7 +54,7 @@ class SpanContext(proto.Message): 8-byte array. """ - span_name = proto.Field( + span_name: str = proto.Field( proto.STRING, number=1, ) diff --git a/google/cloud/monitoring_v3/types/uptime.py b/google/cloud/monitoring_v3/types/uptime.py index 9abcf08e..505ddd43 100644 --- a/google/cloud/monitoring_v3/types/uptime.py +++ b/google/cloud/monitoring_v3/types/uptime.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from typing import MutableMapping, MutableSequence + import proto # type: ignore from google.api import monitored_resource_pb2 # type: ignore @@ -97,27 +99,27 @@ class State(proto.Enum): CREATING = 1 RUNNING = 2 - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) - display_name = proto.Field( + display_name: str = proto.Field( proto.STRING, number=2, ) - network = proto.Field( + network: str = proto.Field( proto.STRING, number=3, ) - gcp_zone = proto.Field( + gcp_zone: str = proto.Field( proto.STRING, number=4, ) - peer_project_id = proto.Field( + peer_project_id: str = proto.Field( proto.STRING, number=6, ) - state = proto.Field( + state: State = proto.Field( proto.ENUM, number=7, enum=State, @@ -189,14 +191,14 @@ class UptimeCheckConfig(proto.Message): The maximum amount of time to wait for the request to complete (must be between 1 and 60 seconds). Required. - content_matchers (Sequence[google.cloud.monitoring_v3.types.UptimeCheckConfig.ContentMatcher]): + content_matchers (MutableSequence[google.cloud.monitoring_v3.types.UptimeCheckConfig.ContentMatcher]): The content that is expected to appear in the data returned by the target server against which the check is run. Currently, only the first entry in the ``content_matchers`` list is supported, and additional entries will be ignored. This field is optional and should only be specified if a content match is required as part of the/ Uptime check. - selected_regions (Sequence[google.cloud.monitoring_v3.types.UptimeCheckRegion]): + selected_regions (MutableSequence[google.cloud.monitoring_v3.types.UptimeCheckRegion]): The list of regions from which the check will be run. Some regions contain one location, and others contain more than one. If this field is @@ -211,7 +213,7 @@ class UptimeCheckConfig(proto.Message): provide 'selected_regions' when is_internal is ``true``, or to provide 'internal_checkers' when is_internal is ``false``. 
- internal_checkers (Sequence[google.cloud.monitoring_v3.types.InternalChecker]): + internal_checkers (MutableSequence[google.cloud.monitoring_v3.types.InternalChecker]): The internal checkers that this check will egress from. If ``is_internal`` is ``true`` and this list is empty, the check will egress from all the InternalCheckers configured @@ -232,11 +234,11 @@ class ResourceGroup(proto.Message): The resource type of the group members. """ - group_id = proto.Field( + group_id: str = proto.Field( proto.STRING, number=1, ) - resource_type = proto.Field( + resource_type: "GroupResourceType" = proto.Field( proto.ENUM, number=2, enum="GroupResourceType", @@ -275,7 +277,7 @@ class HttpCheck(proto.Message): responsible for encrypting the headers. On Get/List calls, if ``mask_headers`` is set to ``true`` then the headers will be obscured with ``******.`` - headers (Mapping[str, str]): + headers (MutableMapping[str, str]): The list of headers to send as part of the Uptime check request. If two headers have the same key and different values, they should be @@ -347,56 +349,56 @@ class BasicAuthentication(proto.Message): the HTTP server. """ - username = proto.Field( + username: str = proto.Field( proto.STRING, number=1, ) - password = proto.Field( + password: str = proto.Field( proto.STRING, number=2, ) - request_method = proto.Field( + request_method: "UptimeCheckConfig.HttpCheck.RequestMethod" = proto.Field( proto.ENUM, number=8, enum="UptimeCheckConfig.HttpCheck.RequestMethod", ) - use_ssl = proto.Field( + use_ssl: bool = proto.Field( proto.BOOL, number=1, ) - path = proto.Field( + path: str = proto.Field( proto.STRING, number=2, ) - port = proto.Field( + port: int = proto.Field( proto.INT32, number=3, ) - auth_info = proto.Field( + auth_info: "UptimeCheckConfig.HttpCheck.BasicAuthentication" = proto.Field( proto.MESSAGE, number=4, message="UptimeCheckConfig.HttpCheck.BasicAuthentication", ) - mask_headers = proto.Field( + mask_headers: bool = proto.Field( proto.BOOL, number=5, ) - headers = proto.MapField( + headers: MutableMapping[str, str] = proto.MapField( proto.STRING, proto.STRING, number=6, ) - content_type = proto.Field( + content_type: "UptimeCheckConfig.HttpCheck.ContentType" = proto.Field( proto.ENUM, number=9, enum="UptimeCheckConfig.HttpCheck.ContentType", ) - validate_ssl = proto.Field( + validate_ssl: bool = proto.Field( proto.BOOL, number=7, ) - body = proto.Field( + body: bytes = proto.Field( proto.BYTES, number=10, ) @@ -411,7 +413,7 @@ class TcpCheck(proto.Message): ``monitored_resource``) to construct the full URL. Required. 
""" - port = proto.Field( + port: int = proto.Field( proto.INT32, number=1, ) @@ -442,73 +444,73 @@ class ContentMatcherOption(proto.Enum): MATCHES_REGEX = 3 NOT_MATCHES_REGEX = 4 - content = proto.Field( + content: str = proto.Field( proto.STRING, number=1, ) - matcher = proto.Field( + matcher: "UptimeCheckConfig.ContentMatcher.ContentMatcherOption" = proto.Field( proto.ENUM, number=2, enum="UptimeCheckConfig.ContentMatcher.ContentMatcherOption", ) - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) - display_name = proto.Field( + display_name: str = proto.Field( proto.STRING, number=2, ) - monitored_resource = proto.Field( + monitored_resource: monitored_resource_pb2.MonitoredResource = proto.Field( proto.MESSAGE, number=3, oneof="resource", message=monitored_resource_pb2.MonitoredResource, ) - resource_group = proto.Field( + resource_group: ResourceGroup = proto.Field( proto.MESSAGE, number=4, oneof="resource", message=ResourceGroup, ) - http_check = proto.Field( + http_check: HttpCheck = proto.Field( proto.MESSAGE, number=5, oneof="check_request_type", message=HttpCheck, ) - tcp_check = proto.Field( + tcp_check: TcpCheck = proto.Field( proto.MESSAGE, number=6, oneof="check_request_type", message=TcpCheck, ) - period = proto.Field( + period: duration_pb2.Duration = proto.Field( proto.MESSAGE, number=7, message=duration_pb2.Duration, ) - timeout = proto.Field( + timeout: duration_pb2.Duration = proto.Field( proto.MESSAGE, number=8, message=duration_pb2.Duration, ) - content_matchers = proto.RepeatedField( + content_matchers: MutableSequence[ContentMatcher] = proto.RepeatedField( proto.MESSAGE, number=9, message=ContentMatcher, ) - selected_regions = proto.RepeatedField( + selected_regions: MutableSequence["UptimeCheckRegion"] = proto.RepeatedField( proto.ENUM, number=10, enum="UptimeCheckRegion", ) - is_internal = proto.Field( + is_internal: bool = proto.Field( proto.BOOL, number=15, ) - internal_checkers = proto.RepeatedField( + internal_checkers: MutableSequence["InternalChecker"] = proto.RepeatedField( proto.MESSAGE, number=14, message="InternalChecker", @@ -540,16 +542,16 @@ class UptimeCheckIp(proto.Message): or IPv6 format. """ - region = proto.Field( + region: "UptimeCheckRegion" = proto.Field( proto.ENUM, number=1, enum="UptimeCheckRegion", ) - location = proto.Field( + location: str = proto.Field( proto.STRING, number=2, ) - ip_address = proto.Field( + ip_address: str = proto.Field( proto.STRING, number=3, ) diff --git a/google/cloud/monitoring_v3/types/uptime_service.py b/google/cloud/monitoring_v3/types/uptime_service.py index 3b96adfe..e433b8df 100644 --- a/google/cloud/monitoring_v3/types/uptime_service.py +++ b/google/cloud/monitoring_v3/types/uptime_service.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from typing import MutableMapping, MutableSequence + import proto # type: ignore from google.cloud.monitoring_v3.types import uptime @@ -59,15 +61,15 @@ class ListUptimeCheckConfigsRequest(proto.Message): results from the previous method call. """ - parent = proto.Field( + parent: str = proto.Field( proto.STRING, number=1, ) - page_size = proto.Field( + page_size: int = proto.Field( proto.INT32, number=3, ) - page_token = proto.Field( + page_token: str = proto.Field( proto.STRING, number=4, ) @@ -77,7 +79,7 @@ class ListUptimeCheckConfigsResponse(proto.Message): r"""The protocol for the ``ListUptimeCheckConfigs`` response. 
Attributes: - uptime_check_configs (Sequence[google.cloud.monitoring_v3.types.UptimeCheckConfig]): + uptime_check_configs (MutableSequence[google.cloud.monitoring_v3.types.UptimeCheckConfig]): The returned Uptime check configurations. next_page_token (str): This field represents the pagination token to retrieve the @@ -96,16 +98,18 @@ class ListUptimeCheckConfigsResponse(proto.Message): def raw_page(self): return self - uptime_check_configs = proto.RepeatedField( + uptime_check_configs: MutableSequence[ + uptime.UptimeCheckConfig + ] = proto.RepeatedField( proto.MESSAGE, number=1, message=uptime.UptimeCheckConfig, ) - next_page_token = proto.Field( + next_page_token: str = proto.Field( proto.STRING, number=2, ) - total_size = proto.Field( + total_size: int = proto.Field( proto.INT32, number=3, ) @@ -124,7 +128,7 @@ class GetUptimeCheckConfigRequest(proto.Message): projects/[PROJECT_ID_OR_NUMBER]/uptimeCheckConfigs/[UPTIME_CHECK_ID] """ - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) @@ -146,11 +150,11 @@ class CreateUptimeCheckConfigRequest(proto.Message): Required. The new Uptime check configuration. """ - parent = proto.Field( + parent: str = proto.Field( proto.STRING, number=1, ) - uptime_check_config = proto.Field( + uptime_check_config: uptime.UptimeCheckConfig = proto.Field( proto.MESSAGE, number=2, message=uptime.UptimeCheckConfig, @@ -183,12 +187,12 @@ class UpdateUptimeCheckConfigRequest(proto.Message): ``content_matchers``, and ``selected_regions``. """ - update_mask = proto.Field( + update_mask: field_mask_pb2.FieldMask = proto.Field( proto.MESSAGE, number=2, message=field_mask_pb2.FieldMask, ) - uptime_check_config = proto.Field( + uptime_check_config: uptime.UptimeCheckConfig = proto.Field( proto.MESSAGE, number=3, message=uptime.UptimeCheckConfig, @@ -208,7 +212,7 @@ class DeleteUptimeCheckConfigRequest(proto.Message): projects/[PROJECT_ID_OR_NUMBER]/uptimeCheckConfigs/[UPTIME_CHECK_ID] """ - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) @@ -233,11 +237,11 @@ class ListUptimeCheckIpsRequest(proto.Message): not yet implemented """ - page_size = proto.Field( + page_size: int = proto.Field( proto.INT32, number=2, ) - page_token = proto.Field( + page_token: str = proto.Field( proto.STRING, number=3, ) @@ -247,7 +251,7 @@ class ListUptimeCheckIpsResponse(proto.Message): r"""The protocol for the ``ListUptimeCheckIps`` response. Attributes: - uptime_check_ips (Sequence[google.cloud.monitoring_v3.types.UptimeCheckIp]): + uptime_check_ips (MutableSequence[google.cloud.monitoring_v3.types.UptimeCheckIp]): The returned list of IP addresses (including region and location) that the checkers run from. 
next_page_token (str): @@ -263,12 +267,12 @@ class ListUptimeCheckIpsResponse(proto.Message): def raw_page(self): return self - uptime_check_ips = proto.RepeatedField( + uptime_check_ips: MutableSequence[uptime.UptimeCheckIp] = proto.RepeatedField( proto.MESSAGE, number=1, message=uptime.UptimeCheckIp, ) - next_page_token = proto.Field( + next_page_token: str = proto.Field( proto.STRING, number=2, ) diff --git a/noxfile.py b/noxfile.py index e9e403ee..8d62faa7 100644 --- a/noxfile.py +++ b/noxfile.py @@ -276,12 +276,16 @@ def cover(session): session.run("coverage", "erase") -@nox.session(python=DEFAULT_PYTHON_VERSION) +@nox.session(python="3.9") def docs(session): """Build the docs for this library.""" session.install("-e", ".") - session.install("sphinx==4.0.1", "alabaster", "recommonmark") + session.install( + "sphinx==4.0.1", + "alabaster", + "recommonmark", + ) shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) session.run( @@ -298,13 +302,16 @@ def docs(session): ) -@nox.session(python=DEFAULT_PYTHON_VERSION) +@nox.session(python="3.9") def docfx(session): """Build the docfx yaml files for this library.""" session.install("-e", ".") session.install( - "sphinx==4.0.1", "alabaster", "recommonmark", "gcp-sphinx-docfx-yaml" + "sphinx==4.0.1", + "alabaster", + "recommonmark", + "gcp-sphinx-docfx-yaml", ) shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) diff --git a/owlbot.py b/owlbot.py index 9323371d..5a446143 100644 --- a/owlbot.py +++ b/owlbot.py @@ -1,4 +1,4 @@ -# Copyright 2018 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -12,42 +12,31 @@ # See the License for the specific language governing permissions and # limitations under the License. -"""This script is used to synthesize generated parts of this library.""" -import re +import json import os +from pathlib import Path +import re +import shutil import synthtool as s import synthtool.gcp as gcp from synthtool.languages import python -import logging - -AUTOSYNTH_MULTIPLE_COMMITS = True -logging.basicConfig(level=logging.DEBUG) +# ---------------------------------------------------------------------------- +# Copy the generated client from the owl-bot staging directory +# ---------------------------------------------------------------------------- -common = gcp.CommonTemplates() +clean_up_generated_samples = True -default_version = "v3" +# Load the default version defined in .repo-metadata.json. +default_version = json.load(open(".repo-metadata.json", "rt")).get( + "default_version" +) for library in s.get_staging_dirs(default_version): - # Synth hack due to googleapis and python-api-common-protos out of sync. - for pattern in [ - "monitored_resource_types=\['monitored_resource_types_value'\],", - "assert response.monitored_resource_types == \['monitored_resource_types_value'\]", - "launch_stage=launch_stage_pb2.LaunchStage.UNIMPLEMENTED,", - "assert response.launch_stage == launch_stage_pb2.LaunchStage.UNIMPLEMENTED", - ]: - s.replace(library / "tests/unit/gapic/monitoring_v3/test_*.py", - pattern, - "" - ) - - # Synth hack due to microgenerator uses "type_" while api-common-protos uses "type". 
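The edits to the generated google/cloud/monitoring_v3/types modules above all follow one mechanical pattern: each module imports MutableMapping and MutableSequence from typing, every proto-plus field gains an explicit class-level annotation, and docstrings switch from Sequence/Mapping to MutableSequence/MutableMapping. A minimal sketch of the resulting style is shown below; the message and field names are hypothetical and not part of this library.

# Illustrative only -- not part of the patch. A proto-plus message written
# in the annotated style now used throughout the generated types modules.
from typing import MutableMapping, MutableSequence

import proto  # type: ignore


class ExampleMessage(proto.Message):
    # Scalar field with an explicit annotation.
    name: str = proto.Field(
        proto.STRING,
        number=1,
    )
    # Map field annotated as MutableMapping rather than Mapping.
    labels: MutableMapping[str, str] = proto.MapField(
        proto.STRING,
        proto.STRING,
        number=2,
    )
    # Repeated field annotated as MutableSequence rather than Sequence.
    tags: MutableSequence[str] = proto.RepeatedField(
        proto.STRING,
        number=3,
    )

The annotations give type checkers the element types of scalar, repeated, and map fields without changing runtime behavior; the proto.Field/RepeatedField/MapField descriptors work exactly as before.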
- for file in ["test_uptime_check_service.py", "test_metric_service.py"]: - s.replace(library / f"tests/unit/gapic/monitoring_v3/{file}", - "type_", - "type" - ) + if clean_up_generated_samples: + shutil.rmtree("samples/generated_samples", ignore_errors=True) + clean_up_generated_samples = False # Comment out broken path helper 'metric_descriptor_path' # https://github.com/googleapis/gapic-generator-python/issues/701 @@ -71,86 +60,28 @@ '''"""\g<1>"""\ndef''', re.MULTILINE| re.DOTALL ) - - # Work around gapic generator bug https://github.com/googleapis/gapic-generator-python/issues/902 - s.replace(library / f"google/cloud/monitoring_{library.name}/types/service.py", - r""". - Attributes:""", - r""".\n - Attributes:""", - ) - - # don't copy nox.py, setup.py, README.rst, docs/index.rst - excludes = ["nox.py", "setup.py", "README.rst", "docs/index.rst"] - s.move(library, excludes=excludes) - + s.move([library], excludes=["**/gapic_version.py", "docs/index.rst", "setup.py", "testing/constraints-3.7.txt"]) s.remove_staging_dirs() # ---------------------------------------------------------------------------- # Add templated files # ---------------------------------------------------------------------------- -templated_files = common.py_library( - samples=True, # set to True only if there are samples - microgenerator=True, + +templated_files = gcp.CommonTemplates().py_library( + cov_level=99, + samples=True, unit_test_extras=["pandas"], system_test_extras=["pandas"], - cov_level=99 -) -s.move(templated_files, excludes=[".coveragerc"]) # microgenerator has a good .coveragerc file - -# ---------------------------------------------------------------------------- -# master --> main edits; context: https://github.com/googleapis/google-cloud-python/issues/10579 -# ---------------------------------------------------------------------------- - -s.replace( - "docs/conf.py", - "master_doc", - "root_doc", -) - -s.replace( - "docs/conf.py", - "# The master toctree document.", - "# The root toctree document.", -) - -s.replace( - ".kokoro/test-samples-impl.sh", - "https://github.com/googleapis/repo-automation-bots/tree/master/packages/flakybot.", - "https://github.com/googleapis/repo-automation-bots/tree/main/packages/flakybot.", -) - -s.replace( - ".kokoro/build.sh", - "https://github.com/googleapis/repo-automation-bots/tree/master/packages/flakybot.", - "https://github.com/googleapis/repo-automation-bots/tree/main/packages/flakybot.", -) - -s.replace( - "CONTRIBUTING.rst", - "master", - "main", -) - -# Revert the change from above, because kubernetes is still using master: -s.replace( - "CONTRIBUTING.rst", - r"https://github.com/kubernetes/community/blob/main/contributors/guide/pull-requests.md#best-practices-for-faster-reviews", - r"https://github.com/kubernetes/community/blob/master/contributors/guide/pull-requests.md#best-practices-for-faster-reviews", + microgenerator=True, + versions=gcp.common.detect_versions(path="./google", default_first=True), ) +s.move(templated_files, excludes=[".coveragerc", ".github/release-please.yml", "docs/index.rst"]) -# ---------------------------------------------------------------------------- -# Samples templates -# ---------------------------------------------------------------------------- python.py_samples(skip_readmes=True) -# Work around bug in templates https://github.com/googleapis/synthtool/pull/1335 -s.replace(".github/workflows/unittest.yml", "--fail-under=100", "--fail-under=99") - -python.configure_previous_major_version_branches() - -s.shell.run(["nox", 
"-s", "blacken"], hide_output=False) - +# run format session for all directories which have a noxfile +for noxfile in Path(".").glob("**/noxfile.py"): + s.shell.run(["nox", "-s", "blacken"], cwd=noxfile.parent, hide_output=False) # -------------------------------------------------------------------------- # Modify test configs diff --git a/release-please-config.json b/release-please-config.json new file mode 100644 index 00000000..8ea43394 --- /dev/null +++ b/release-please-config.json @@ -0,0 +1,25 @@ +{ + "$schema": "https://raw.githubusercontent.com/googleapis/release-please/main/schemas/config.json", + "packages": { + ".": { + "release-type": "python", + "extra-files": [ + "google/cloud/monitoring/gapic_version.py", + "google/cloud/monitoring_v3/gapic_version.py", + { + "type": "json", + "path": "samples/generated_samples/snippet_metadata_google.monitoring.v3.json", + "jsonpath": "$.clientLibrary.version" + } + ] + } + }, + "release-type": "python", + "plugins": [ + { + "type": "sentence-case" + } + ], + "initial-version": "0.1.0" +} + diff --git a/samples/generated_samples/snippet_metadata_monitoring_v3.json b/samples/generated_samples/snippet_metadata_google.monitoring.v3.json similarity index 99% rename from samples/generated_samples/snippet_metadata_monitoring_v3.json rename to samples/generated_samples/snippet_metadata_google.monitoring.v3.json index 0eef96cd..e3ab6eeb 100644 --- a/samples/generated_samples/snippet_metadata_monitoring_v3.json +++ b/samples/generated_samples/snippet_metadata_google.monitoring.v3.json @@ -7,7 +7,8 @@ } ], "language": "PYTHON", - "name": "google-cloud-monitoring" + "name": "google-cloud-monitoring", + "version": "2.12.0" }, "snippets": [ { @@ -1990,7 +1991,7 @@ }, { "name": "time_series", - "type": "Sequence[google.cloud.monitoring_v3.types.TimeSeries]" + "type": "MutableSequence[google.cloud.monitoring_v3.types.TimeSeries]" }, { "name": "retry", @@ -2071,7 +2072,7 @@ }, { "name": "time_series", - "type": "Sequence[google.cloud.monitoring_v3.types.TimeSeries]" + "type": "MutableSequence[google.cloud.monitoring_v3.types.TimeSeries]" }, { "name": "retry", @@ -2153,7 +2154,7 @@ }, { "name": "time_series", - "type": "Sequence[google.cloud.monitoring_v3.types.TimeSeries]" + "type": "MutableSequence[google.cloud.monitoring_v3.types.TimeSeries]" }, { "name": "retry", @@ -2234,7 +2235,7 @@ }, { "name": "time_series", - "type": "Sequence[google.cloud.monitoring_v3.types.TimeSeries]" + "type": "MutableSequence[google.cloud.monitoring_v3.types.TimeSeries]" }, { "name": "retry", diff --git a/samples/snippets/README b/samples/snippets/README new file mode 100644 index 00000000..ceb0b8e9 --- /dev/null +++ b/samples/snippets/README @@ -0,0 +1,3 @@ +These samples have been moved. + +https://github.com/GoogleCloudPlatform/python-docs-samples/tree/main/monitoring/snippets/v3 \ No newline at end of file diff --git a/samples/snippets/v3/alerts-client/.gitignore b/samples/snippets/v3/alerts-client/.gitignore deleted file mode 100644 index de0a466d..00000000 --- a/samples/snippets/v3/alerts-client/.gitignore +++ /dev/null @@ -1 +0,0 @@ -backup.json diff --git a/samples/snippets/v3/alerts-client/README.rst b/samples/snippets/v3/alerts-client/README.rst deleted file mode 100644 index bb59aad5..00000000 --- a/samples/snippets/v3/alerts-client/README.rst +++ /dev/null @@ -1,138 +0,0 @@ -.. This file is automatically generated. Do not edit this file directly. 
- -Google Stackdriver Alerting API Python Samples -=============================================================================== - -.. image:: https://gstatic.com/cloudssh/images/open-btn.png - :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=monitoring/api/v3/alerts-client/README.rst - - -This directory contains samples for Google Stackdriver Alerting API. Stackdriver Monitoring collects metrics, events, and metadata from Google Cloud Platform, Amazon Web Services (AWS), hosted uptime probes, application instrumentation, and a variety of common application components including Cassandra, Nginx, Apache Web Server, Elasticsearch and many others. Stackdriver's Alerting API allows you to create, delete, and make back up copies of your alert policies. - - - - -.. _Google Stackdriver Alerting API: https://cloud.google.com/monitoring/alerts/ - -To run the sample, you need to enable the API at: https://console.cloud.google.com/apis/library/monitoring.googleapis.com - -To run the sample, you need to have `Monitoring Admin` role. - -Please visit [the Cloud Console UI of this API](https://console.cloud.google.com/monitoring) and [create a new Workspace with the same name of your Cloud project](https://cloud.google.com/monitoring/workspaces/create). - - -Setup -------------------------------------------------------------------------------- - - -Authentication -++++++++++++++ - -This sample requires you to have authentication setup. Refer to the -`Authentication Getting Started Guide`_ for instructions on setting up -credentials for applications. - -.. _Authentication Getting Started Guide: - https://cloud.google.com/docs/authentication/getting-started - -Install Dependencies -++++++++++++++++++++ - -#. Clone python-docs-samples and change directory to the sample directory you want to use. - - .. code-block:: bash - - $ git clone https://github.com/GoogleCloudPlatform/python-docs-samples.git - -#. Install `pip`_ and `virtualenv`_ if you do not already have them. You may want to refer to the `Python Development Environment Setup Guide`_ for Google Cloud Platform for instructions. - - .. _Python Development Environment Setup Guide: - https://cloud.google.com/python/setup - -#. Create a virtualenv. Samples are compatible with Python 2.7 and 3.4+. - - .. code-block:: bash - - $ virtualenv env - $ source env/bin/activate - -#. Install the dependencies needed to run the samples. - - .. code-block:: bash - - $ pip install -r requirements.txt - -.. _pip: https://pip.pypa.io/ -.. _virtualenv: https://virtualenv.pypa.io/ - -Samples -------------------------------------------------------------------------------- - -Snippets -+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ - -.. image:: https://gstatic.com/cloudssh/images/open-btn.png - :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=monitoring/api/v3/alerts-client/snippets.py,monitoring/api/v3/alerts-client/README.rst - - - - -To run this sample: - -.. code-block:: bash - - $ python snippets.py - - usage: snippets.py [-h] - {list-alert-policies,list-notification-channels,enable-alert-policies,disable-alert-policies,replace-notification-channels,backup,restore} - ... - - Demonstrates AlertPolicy API operations. 
- - positional arguments: - {list-alert-policies,list-notification-channels,enable-alert-policies,disable-alert-policies,replace-notification-channels,backup,restore} - list-alert-policies - list-notification-channels - enable-alert-policies - Enable or disable alert policies in a project. - Arguments: project_name (str) enable (bool): Enable or - disable the policies. filter_ (str, optional): Only - enable/disable alert policies that match this filter_. - See - https://cloud.google.com/monitoring/api/v3/sorting- - and-filtering - disable-alert-policies - Enable or disable alert policies in a project. - Arguments: project_name (str) enable (bool): Enable or - disable the policies. filter_ (str, optional): Only - enable/disable alert policies that match this filter_. - See - https://cloud.google.com/monitoring/api/v3/sorting- - and-filtering - replace-notification-channels - backup - restore - - optional arguments: - -h, --help show this help message and exit - - - - - -The client library -------------------------------------------------------------------------------- - -This sample uses the `Google Cloud Client Library for Python`_. -You can read the documentation for more details on API usage and use GitHub -to `browse the source`_ and `report issues`_. - -.. _Google Cloud Client Library for Python: - https://googlecloudplatform.github.io/google-cloud-python/ -.. _browse the source: - https://github.com/GoogleCloudPlatform/google-cloud-python -.. _report issues: - https://github.com/GoogleCloudPlatform/google-cloud-python/issues - - -.. _Google Cloud SDK: https://cloud.google.com/sdk/ diff --git a/samples/snippets/v3/alerts-client/README.rst.in b/samples/snippets/v3/alerts-client/README.rst.in deleted file mode 100644 index 00b28012..00000000 --- a/samples/snippets/v3/alerts-client/README.rst.in +++ /dev/null @@ -1,33 +0,0 @@ -# This file is used to generate README.rst - -product: - name: Google Stackdriver Alerting API - short_name: Stackdriver Alerting API - url: https://cloud.google.com/monitoring/alerts/ - description: > - Stackdriver Monitoring collects metrics, events, and metadata from Google - Cloud Platform, Amazon Web Services (AWS), hosted uptime probes, - application instrumentation, and a variety of common application - components including Cassandra, Nginx, Apache Web Server, Elasticsearch - and many others. Stackdriver's Alerting API allows you to create, - delete, and make back up copies of your alert policies. - -required_api_url: https://console.cloud.google.com/apis/library/monitoring.googleapis.com -required_role: Monitoring Admin -other_required_steps: > - Please visit [the Cloud Console UI of this - API](https://console.cloud.google.com/monitoring) and create a new - Workspace with the same name of your Cloud project. - -setup: -- auth -- install_deps - -samples: -- name: Snippets - file: snippets.py - show_help: true - -cloud_client_library: true - -folder: monitoring/api/v3/alerts-client diff --git a/samples/snippets/v3/alerts-client/noxfile.py b/samples/snippets/v3/alerts-client/noxfile.py deleted file mode 100644 index 0398d72f..00000000 --- a/samples/snippets/v3/alerts-client/noxfile.py +++ /dev/null @@ -1,312 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import print_function - -import glob -import os -from pathlib import Path -import sys -from typing import Callable, Dict, List, Optional - -import nox - - -# WARNING - WARNING - WARNING - WARNING - WARNING -# WARNING - WARNING - WARNING - WARNING - WARNING -# DO NOT EDIT THIS FILE EVER! -# WARNING - WARNING - WARNING - WARNING - WARNING -# WARNING - WARNING - WARNING - WARNING - WARNING - -BLACK_VERSION = "black==22.3.0" -ISORT_VERSION = "isort==5.10.1" - -# Copy `noxfile_config.py` to your directory and modify it instead. - -# `TEST_CONFIG` dict is a configuration hook that allows users to -# modify the test configurations. The values here should be in sync -# with `noxfile_config.py`. Users will copy `noxfile_config.py` into -# their directory and modify it. - -TEST_CONFIG = { - # You can opt out from the test for specific Python versions. - "ignored_versions": [], - # Old samples are opted out of enforcing Python type hints - # All new samples should feature them - "enforce_type_hints": False, - # An envvar key for determining the project id to use. Change it - # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a - # build specific Cloud project. You can also use your own string - # to use your own Cloud project. - "gcloud_project_env": "GOOGLE_CLOUD_PROJECT", - # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT', - # If you need to use a specific version of pip, - # change pip_version_override to the string representation - # of the version number, for example, "20.2.4" - "pip_version_override": None, - # A dictionary you want to inject into your test. Don't put any - # secrets here. These values will override predefined values. - "envs": {}, -} - - -try: - # Ensure we can import noxfile_config in the project's directory. - sys.path.append(".") - from noxfile_config import TEST_CONFIG_OVERRIDE -except ImportError as e: - print("No user noxfile_config found: detail: {}".format(e)) - TEST_CONFIG_OVERRIDE = {} - -# Update the TEST_CONFIG with the user supplied values. -TEST_CONFIG.update(TEST_CONFIG_OVERRIDE) - - -def get_pytest_env_vars() -> Dict[str, str]: - """Returns a dict for pytest invocation.""" - ret = {} - - # Override the GCLOUD_PROJECT and the alias. - env_key = TEST_CONFIG["gcloud_project_env"] - # This should error out if not set. - ret["GOOGLE_CLOUD_PROJECT"] = os.environ[env_key] - - # Apply user supplied envs. - ret.update(TEST_CONFIG["envs"]) - return ret - - -# DO NOT EDIT - automatically generated. -# All versions used to test samples. -ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10"] - -# Any default versions that should be ignored. -IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] - -TESTED_VERSIONS = sorted([v for v in ALL_VERSIONS if v not in IGNORED_VERSIONS]) - -INSTALL_LIBRARY_FROM_SOURCE = os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False) in ( - "True", - "true", -) - -# Error if a python version is missing -nox.options.error_on_missing_interpreters = True - -# -# Style Checks -# - - -def _determine_local_import_names(start_dir: str) -> List[str]: - """Determines all import names that should be considered "local". 
- - This is used when running the linter to insure that import order is - properly checked. - """ - file_ext_pairs = [os.path.splitext(path) for path in os.listdir(start_dir)] - return [ - basename - for basename, extension in file_ext_pairs - if extension == ".py" - or os.path.isdir(os.path.join(start_dir, basename)) - and basename not in ("__pycache__") - ] - - -# Linting with flake8. -# -# We ignore the following rules: -# E203: whitespace before ‘:’ -# E266: too many leading ‘#’ for block comment -# E501: line too long -# I202: Additional newline in a section of imports -# -# We also need to specify the rules which are ignored by default: -# ['E226', 'W504', 'E126', 'E123', 'W503', 'E24', 'E704', 'E121'] -FLAKE8_COMMON_ARGS = [ - "--show-source", - "--builtin=gettext", - "--max-complexity=20", - "--import-order-style=google", - "--exclude=.nox,.cache,env,lib,generated_pb2,*_pb2.py,*_pb2_grpc.py", - "--ignore=E121,E123,E126,E203,E226,E24,E266,E501,E704,W503,W504,I202", - "--max-line-length=88", -] - - -@nox.session -def lint(session: nox.sessions.Session) -> None: - if not TEST_CONFIG["enforce_type_hints"]: - session.install("flake8", "flake8-import-order") - else: - session.install("flake8", "flake8-import-order", "flake8-annotations") - - local_names = _determine_local_import_names(".") - args = FLAKE8_COMMON_ARGS + [ - "--application-import-names", - ",".join(local_names), - ".", - ] - session.run("flake8", *args) - - -# -# Black -# - - -@nox.session -def blacken(session: nox.sessions.Session) -> None: - """Run black. Format code to uniform standard.""" - session.install(BLACK_VERSION) - python_files = [path for path in os.listdir(".") if path.endswith(".py")] - - session.run("black", *python_files) - - -# -# format = isort + black -# - -@nox.session -def format(session: nox.sessions.Session) -> None: - """ - Run isort to sort imports. Then run black - to format code to uniform standard. - """ - session.install(BLACK_VERSION, ISORT_VERSION) - python_files = [path for path in os.listdir(".") if path.endswith(".py")] - - # Use the --fss option to sort imports using strict alphabetical order. 
- # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections - session.run("isort", "--fss", *python_files) - session.run("black", *python_files) - - -# -# Sample Tests -# - - -PYTEST_COMMON_ARGS = ["--junitxml=sponge_log.xml"] - - -def _session_tests( - session: nox.sessions.Session, post_install: Callable = None -) -> None: - # check for presence of tests - test_list = glob.glob("**/*_test.py", recursive=True) + glob.glob("**/test_*.py", recursive=True) - test_list.extend(glob.glob("**/tests", recursive=True)) - - if len(test_list) == 0: - print("No tests found, skipping directory.") - return - - if TEST_CONFIG["pip_version_override"]: - pip_version = TEST_CONFIG["pip_version_override"] - session.install(f"pip=={pip_version}") - """Runs py.test for a particular project.""" - concurrent_args = [] - if os.path.exists("requirements.txt"): - if os.path.exists("constraints.txt"): - session.install("-r", "requirements.txt", "-c", "constraints.txt") - else: - session.install("-r", "requirements.txt") - with open("requirements.txt") as rfile: - packages = rfile.read() - - if os.path.exists("requirements-test.txt"): - if os.path.exists("constraints-test.txt"): - session.install( - "-r", "requirements-test.txt", "-c", "constraints-test.txt" - ) - else: - session.install("-r", "requirements-test.txt") - with open("requirements-test.txt") as rtfile: - packages += rtfile.read() - - if INSTALL_LIBRARY_FROM_SOURCE: - session.install("-e", _get_repo_root()) - - if post_install: - post_install(session) - - if "pytest-parallel" in packages: - concurrent_args.extend(['--workers', 'auto', '--tests-per-worker', 'auto']) - elif "pytest-xdist" in packages: - concurrent_args.extend(['-n', 'auto']) - - session.run( - "pytest", - *(PYTEST_COMMON_ARGS + session.posargs + concurrent_args), - # Pytest will return 5 when no tests are collected. This can happen - # on travis where slow and flaky tests are excluded. - # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html - success_codes=[0, 5], - env=get_pytest_env_vars(), - ) - - -@nox.session(python=ALL_VERSIONS) -def py(session: nox.sessions.Session) -> None: - """Runs py.test for a sample using the specified version of Python.""" - if session.python in TESTED_VERSIONS: - _session_tests(session) - else: - session.skip( - "SKIPPED: {} tests are disabled for this sample.".format(session.python) - ) - - -# -# Readmegen -# - - -def _get_repo_root() -> Optional[str]: - """ Returns the root folder of the project. """ - # Get root of this repository. Assume we don't have directories nested deeper than 10 items. 
- p = Path(os.getcwd()) - for i in range(10): - if p is None: - break - if Path(p / ".git").exists(): - return str(p) - # .git is not available in repos cloned via Cloud Build - # setup.py is always in the library's root, so use that instead - # https://github.com/googleapis/synthtool/issues/792 - if Path(p / "setup.py").exists(): - return str(p) - p = p.parent - raise Exception("Unable to detect repository root.") - - -GENERATED_READMES = sorted([x for x in Path(".").rglob("*.rst.in")]) - - -@nox.session -@nox.parametrize("path", GENERATED_READMES) -def readmegen(session: nox.sessions.Session, path: str) -> None: - """(Re-)generates the readme for a sample.""" - session.install("jinja2", "pyyaml") - dir_ = os.path.dirname(path) - - if os.path.exists(os.path.join(dir_, "requirements.txt")): - session.install("-r", os.path.join(dir_, "requirements.txt")) - - in_file = os.path.join(dir_, "README.rst.in") - session.run( - "python", _get_repo_root() + "/scripts/readme-gen/readme_gen.py", in_file - ) diff --git a/samples/snippets/v3/alerts-client/noxfile_config.py b/samples/snippets/v3/alerts-client/noxfile_config.py deleted file mode 100644 index 664c5830..00000000 --- a/samples/snippets/v3/alerts-client/noxfile_config.py +++ /dev/null @@ -1,42 +0,0 @@ -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Default TEST_CONFIG_OVERRIDE for python repos. - -# You can copy this file into your directory, then it will be imported from -# the noxfile.py. - -# The source of truth: -# https://github.com/GoogleCloudPlatform/python-docs-samples/blob/master/noxfile_config.py - -TEST_CONFIG_OVERRIDE = { - # You can opt out from the test for specific Python versions. - 'ignored_versions': ["2.7"], - - # Declare optional test sessions you want to opt-in. Currently we - # have the following optional test sessions: - # 'cloud_run' # Test session for Cloud Run application. - 'opt_in_sessions': [], - - # An envvar key for determining the project id to use. Change it - # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a - # build specific Cloud project. You can also use your own string - # to use your own Cloud project. - # 'gcloud_project_env': 'GOOGLE_CLOUD_PROJECT', - 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT', - - # A dictionary you want to inject into your test. Don't put any - # secrets here. These values will override predefined values.
- 'envs': {}, -} diff --git a/samples/snippets/v3/alerts-client/requirements-test.txt b/samples/snippets/v3/alerts-client/requirements-test.txt deleted file mode 100644 index 8a77da2a..00000000 --- a/samples/snippets/v3/alerts-client/requirements-test.txt +++ /dev/null @@ -1,3 +0,0 @@ -pytest==7.1.3 -retrying==1.3.3 -flaky==3.7.0 diff --git a/samples/snippets/v3/alerts-client/requirements.txt b/samples/snippets/v3/alerts-client/requirements.txt deleted file mode 100644 index db5d369c..00000000 --- a/samples/snippets/v3/alerts-client/requirements.txt +++ /dev/null @@ -1,2 +0,0 @@ -google-cloud-monitoring==2.11.2 -tabulate==0.8.10 diff --git a/samples/snippets/v3/alerts-client/snippets.py b/samples/snippets/v3/alerts-client/snippets.py deleted file mode 100644 index aeaa45a1..00000000 --- a/samples/snippets/v3/alerts-client/snippets.py +++ /dev/null @@ -1,412 +0,0 @@ -# Copyright 2018 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import print_function - -import argparse -import json -import os - -import google.api_core.exceptions -from google.cloud import monitoring_v3 -from google.protobuf import field_mask_pb2 as field_mask -import proto -import tabulate - - -# [START monitoring_alert_list_policies] -def list_alert_policies(project_name): - """List alert policies in a project. - - Arguments: - project_name (str): The Google Cloud Project to use. The project name - must be in the format - 'projects/'. - """ - - client = monitoring_v3.AlertPolicyServiceClient() - policies = client.list_alert_policies(name=project_name) - print( - str( - tabulate.tabulate( - [(policy.name, policy.display_name) for policy in policies], - ("name", "display_name"), - ) - ) - ) - - -# [END monitoring_alert_list_policies] - - -# [START monitoring_alert_list_channels] -def list_notification_channels(project_name): - """List alert notification channels in a project. - - Arguments: - project_name (str): The Google Cloud Project to use. The project name - must be in the format - 'projects/'. - """ - - client = monitoring_v3.NotificationChannelServiceClient() - channels = client.list_notification_channels(name=project_name) - print( - tabulate.tabulate( - [(channel.name, channel.display_name) for channel in channels], - ("name", "display_name"), - ) - ) - - -# [END monitoring_alert_list_channels] - - -# [START monitoring_alert_enable_policies] -def enable_alert_policies(project_name, enable, filter_=None): - """Enable or disable alert policies in a project. - - Arguments: - project_name (str): The Google Cloud Project to use. The project name - must be in the format - 'projects/'. - enable (bool): Enable or disable the policies. - filter_ (str, optional): Only enable/disable alert policies that match - this filter_. 
See - https://cloud.google.com/monitoring/api/v3/sorting-and-filtering - """ - - client = monitoring_v3.AlertPolicyServiceClient() - policies = client.list_alert_policies( - request={"name": project_name, "filter": filter_} - ) - - for policy in policies: - if bool(enable) == policy.enabled: - print( - "Policy", - policy.name, - "is already", - "enabled" if policy.enabled else "disabled", - ) - else: - policy.enabled = bool(enable) - mask = field_mask.FieldMask() - mask.paths.append("enabled") - client.update_alert_policy(alert_policy=policy, update_mask=mask) - print("Enabled" if enable else "Disabled", policy.name) - - -# [END monitoring_alert_enable_policies] - - -# [START monitoring_alert_replace_channels] -def replace_notification_channels(project_name, alert_policy_id, channel_ids): - """Replace the notification channels of an alert policy. - - Arguments: - project_name (str): The Google Cloud Project to use. The project name - must be in the format - 'projects/'. - alert_policy_id (str): The ID of the alert policy whose notification - channels are to be replaced. - channel_ids (list[str]): IDs of the notification channels to set - for the given alert policy. - """ - - _, project_id = project_name.split("/") - alert_client = monitoring_v3.AlertPolicyServiceClient() - channel_client = monitoring_v3.NotificationChannelServiceClient() - policy = monitoring_v3.AlertPolicy() - policy.name = alert_client.alert_policy_path(project_id, alert_policy_id) - - for channel_id in channel_ids: - policy.notification_channels.append( - channel_client.notification_channel_path(project_id, channel_id) - ) - - mask = field_mask.FieldMask() - mask.paths.append("notification_channels") - updated_policy = alert_client.update_alert_policy( - alert_policy=policy, update_mask=mask - ) - print("Updated", updated_policy.name) - - -# [END monitoring_alert_replace_channels] - - -# [START monitoring_alert_delete_channel] -def delete_notification_channels(project_name, channel_ids, force=None): - """Delete alert notification channels. - - Arguments: - project_name (str): The Google Cloud Project to use. The project name - must be in the format - 'projects/'. - channel_ids (list[str]): List of IDs of notification channels to delete. - force (bool): If true, the notification channels are deleted regardless - of whether they are in use by alert policies. If false, channels that are still - referenced by an existing alerting policy will fail to be deleted. - """ - - channel_client = monitoring_v3.NotificationChannelServiceClient() - for channel_id in channel_ids: - channel_name = "{}/notificationChannels/{}".format(project_name, channel_id) - try: - channel_client.delete_notification_channel(name=channel_name, force=force) - print("Channel {} deleted".format(channel_name)) - except ValueError: - print("The parameters are invalid") - except Exception as e: - print("API call failed: {}".format(e)) - - -# [END monitoring_alert_delete_channel] - - -# [START monitoring_alert_backup_policies] -def backup(project_name, backup_filename): - """Backup alert policies from a project to a local file. - - Arguments: - project_name (str): The Google Cloud Project to use. The project name - must be in the format - 'projects/' - backup_filename (str): Name of the file (along with its path) to which - the alert policies will be written as backup.
- """ - - alert_client = monitoring_v3.AlertPolicyServiceClient() - channel_client = monitoring_v3.NotificationChannelServiceClient() - record = { - "project_name": project_name, - "policies": list(alert_client.list_alert_policies(name=project_name)), - "channels": list(channel_client.list_notification_channels(name=project_name)), - } - json.dump(record, open(backup_filename, "wt"), cls=ProtoEncoder, indent=2) - print( - "Backed up alert policies and notification channels to {}.".format( - backup_filename - ) - ) - - -class ProtoEncoder(json.JSONEncoder): - """Encode protobufs as json.""" - - def default(self, obj): - if type(obj) in (monitoring_v3.AlertPolicy, monitoring_v3.NotificationChannel): - text = proto.Message.to_json(obj) - return json.loads(text) - return super(ProtoEncoder, self).default(obj) - - -# [END monitoring_alert_backup_policies] - - -# [START monitoring_alert_restore_policies] -# [START monitoring_alert_create_policy] -# [START monitoring_alert_create_channel] -# [START monitoring_alert_update_channel] -# [START monitoring_alert_enable_channel] -def restore(project_name, backup_filename): - print( - "Loading alert policies and notification channels from {}.".format( - backup_filename - ) - ) - record = json.load(open(backup_filename, "rt")) - is_same_project = project_name == record["project_name"] - # Convert dicts to AlertPolicies. - policies_json = [json.dumps(policy) for policy in record["policies"]] - policies = [ - monitoring_v3.AlertPolicy.from_json(policy_json) - for policy_json in policies_json - ] - # Convert dicts to NotificationChannels - channels_json = [json.dumps(channel) for channel in record["channels"]] - channels = [ - monitoring_v3.NotificationChannel.from_json(channel_json) - for channel_json in channels_json - ] - - # Restore the channels. - channel_client = monitoring_v3.NotificationChannelServiceClient() - channel_name_map = {} - - for channel in channels: - updated = False - print("Updating channel", channel.display_name) - # This field is immutable and it is illegal to specify a - # non-default value (UNVERIFIED or VERIFIED) in the - # Create() or Update() operations. - channel.verification_status = ( - monitoring_v3.NotificationChannel.VerificationStatus.VERIFICATION_STATUS_UNSPECIFIED - ) - - if is_same_project: - try: - channel_client.update_notification_channel(notification_channel=channel) - updated = True - except google.api_core.exceptions.NotFound: - pass # The channel was deleted. Create it below. - - if not updated: - # The channel no longer exists. Recreate it. - old_name = channel.name - del channel.name - new_channel = channel_client.create_notification_channel( - name=project_name, notification_channel=channel - ) - channel_name_map[old_name] = new_channel.name - - # Restore the alerts - alert_client = monitoring_v3.AlertPolicyServiceClient() - - for policy in policies: - print("Updating policy", policy.display_name) - # These two fields cannot be set directly, so clear them. - del policy.creation_record - del policy.mutation_record - - # Update old channel names with new channel names. - for i, channel in enumerate(policy.notification_channels): - new_channel = channel_name_map.get(channel) - if new_channel: - policy.notification_channels[i] = new_channel - - updated = False - - if is_same_project: - try: - alert_client.update_alert_policy(alert_policy=policy) - updated = True - except google.api_core.exceptions.NotFound: - pass # The policy was deleted. Create it below. 
- except google.api_core.exceptions.InvalidArgument: - # Annoying that API throws InvalidArgument when the policy - # does not exist. Seems like it should throw NotFound. - pass # The policy was deleted. Create it below. - - if not updated: - # The policy no longer exists. Recreate it. - old_name = policy.name - del policy.name - for condition in policy.conditions: - del condition.name - policy = alert_client.create_alert_policy( - name=project_name, alert_policy=policy - ) - print("Updated", policy.name) - - -# [END monitoring_alert_enable_channel] -# [END monitoring_alert_restore_policies] -# [END monitoring_alert_create_policy] -# [END monitoring_alert_create_channel] -# [END monitoring_alert_update_channel] - - -class MissingProjectIdError(Exception): - pass - - -def project_id(): - """Retrieves the project id from the environment variable. - - Raises: - MissingProjectIdError -- When not set. - - Returns: - str -- the project id - """ - project_id = os.environ["GOOGLE_CLOUD_PROJECT"] - - if not project_id: - raise MissingProjectIdError( - "Set the environment variable " - + "GOOGLE_CLOUD_PROJECT to your Google Cloud Project Id." - ) - return project_id - - -def project_name(): - return "projects/" + project_id() - - -if __name__ == "__main__": - - parser = argparse.ArgumentParser( - description="Demonstrates AlertPolicy API operations." - ) - - subparsers = parser.add_subparsers(dest="command") - - list_alert_policies_parser = subparsers.add_parser( - "list-alert-policies", help=list_alert_policies.__doc__ - ) - - list_notification_channels_parser = subparsers.add_parser( - "list-notification-channels", help=list_notification_channels.__doc__ - ) - - enable_alert_policies_parser = subparsers.add_parser( - "enable-alert-policies", help=enable_alert_policies.__doc__ - ) - enable_alert_policies_parser.add_argument( - "--filter", - ) - - disable_alert_policies_parser = subparsers.add_parser( - "disable-alert-policies", help=enable_alert_policies.__doc__ - ) - disable_alert_policies_parser.add_argument( - "--filter", - ) - - replace_notification_channels_parser = subparsers.add_parser( - "replace-notification-channels", help=replace_notification_channels.__doc__ - ) - replace_notification_channels_parser.add_argument( - "-p", "--alert_policy_id", required=True - ) - replace_notification_channels_parser.add_argument( - "-c", "--notification_channel_id", required=True, action="append" - ) - - backup_parser = subparsers.add_parser("backup", help=backup.__doc__) - backup_parser.add_argument("--backup_to_filename", required=True) - - restore_parser = subparsers.add_parser("restore", help=restore.__doc__) - restore_parser.add_argument("--restore_from_filename", required=True) - - args = parser.parse_args() - - if args.command == "list-alert-policies": - list_alert_policies(project_name()) - - elif args.command == "list-notification-channels": - list_notification_channels(project_name()) - - elif args.command == "enable-alert-policies": - enable_alert_policies(project_name(), enable=True, filter_=args.filter) - - elif args.command == "disable-alert-policies": - enable_alert_policies(project_name(), enable=False, filter_=args.filter) - - elif args.command == "replace-notification-channels": - replace_notification_channels( - project_name(), args.alert_policy_id, args.notification_channel_id - ) - - elif args.command == "backup": - backup(project_name(), args.backup_to_filename) - - elif args.command == "restore": - restore(project_name(), args.restore_from_filename) diff --git
a/samples/snippets/v3/alerts-client/snippets_test.py b/samples/snippets/v3/alerts-client/snippets_test.py deleted file mode 100644 index b1c62af2..00000000 --- a/samples/snippets/v3/alerts-client/snippets_test.py +++ /dev/null @@ -1,229 +0,0 @@ -# Copyright 2018 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import print_function - -from datetime import datetime -import random -import string -import time - -from google.api_core.exceptions import Aborted -from google.api_core.exceptions import DeadlineExceeded -from google.api_core.exceptions import NotFound -from google.api_core.exceptions import ServiceUnavailable -from google.cloud import monitoring_v3 -import pytest -from retrying import retry - -import snippets - - -# We assume we have access to good randomness source. -random.seed() - - -def random_name(length): - return "".join([random.choice(string.ascii_lowercase) for i in range(length)]) - - -def retry_on_exceptions(exception): - return isinstance(exception, (Aborted, ServiceUnavailable, DeadlineExceeded)) - - -def delay_on_aborted(err, *args): - if retry_on_exceptions(err[1]): - # add randomness for avoiding continuous conflict - time.sleep(5 + (random.randint(0, 9) * 0.1)) - return True - return False - - -class PochanFixture: - """A test fixture that creates an alert POlicy and a notification CHANnel, - hence the name, pochan. - """ - - def __init__(self): - self.project_id = snippets.project_id() - self.project_name = snippets.project_name() - self.alert_policy_client = monitoring_v3.AlertPolicyServiceClient() - self.notification_channel_client = ( - monitoring_v3.NotificationChannelServiceClient() - ) - - # delete all existing policies older than 1 hour prior to testing - for policy in self.alert_policy_client.list_alert_policies(name=self.project_name): - seconds_since_creation = datetime.timestamp(datetime.utcnow())-datetime.timestamp(policy.creation_record.mutate_time) - if seconds_since_creation > 3600: - try: - self.alert_policy_client.delete_alert_policy( - name=policy.name - ) - except NotFound: - print("Ignored NotFound when deleting a policy.") - - def __enter__(self): - @retry( - wait_exponential_multiplier=1000, - wait_exponential_max=10000, - stop_max_attempt_number=10, - retry_on_exception=retry_on_exceptions, - ) - def setup(): - # Create a policy. - json = open("test_alert_policy.json").read() - policy = monitoring_v3.AlertPolicy.from_json(json) - policy.display_name = "snippets-test-" + random_name(10) - self.alert_policy = self.alert_policy_client.create_alert_policy( - name=self.project_name, alert_policy=policy - ) - # Create a notification channel. 
- json = open("test_notification_channel.json").read() - notification_channel = monitoring_v3.NotificationChannel.from_json(json) - notification_channel.display_name = "snippets-test-" + random_name(10) - self.notification_channel = ( - self.notification_channel_client.create_notification_channel( - name=self.project_name, notification_channel=notification_channel - ) - ) - - setup() - return self - - def __exit__(self, type, value, traceback): - # Delete the policy and channel we created. - @retry( - wait_exponential_multiplier=1000, - wait_exponential_max=10000, - stop_max_attempt_number=10, - retry_on_exception=retry_on_exceptions, - ) - def teardown(): - try: - self.alert_policy_client.delete_alert_policy( - name=self.alert_policy.name - ) - except NotFound: - print("Ignored NotFound when deleting a policy.") - try: - if self.notification_channel.name: - self.notification_channel_client.delete_notification_channel( - self.notification_channel.name - ) - except NotFound: - print("Ignored NotFound when deleting a channel.") - - teardown() - - -@pytest.fixture(scope="session") -def pochan(): - with PochanFixture() as pochan: - yield pochan - - -def test_list_alert_policies(capsys, pochan): - # Query snippets.list_alert_policies() for up to 50 seconds - # to allow the newly created policy to appear in the list. - retry = 5 - while retry: - snippets.list_alert_policies(pochan.project_name) - out, _ = capsys.readouterr() - if pochan.alert_policy.display_name in out: - break - retry = retry - 1 - time.sleep(10) - - assert retry > 0 - - -@pytest.mark.flaky(rerun_filter=delay_on_aborted, max_runs=5) -def test_enable_alert_policies(capsys, pochan): - # These sleep calls are for mitigating the following error: - # "409 Too many concurrent edits to the project configuration. - # Please try again." - # Having multiple projects will void these `sleep()` calls. - # See also #3310 - time.sleep(2) - snippets.enable_alert_policies(pochan.project_name, True, "name='{}'".format(pochan.alert_policy.name)) - out, _ = capsys.readouterr() - assert ( - "Enabled {0}".format(pochan.project_name) in out - or "{} is already enabled".format(pochan.alert_policy.name) in out - ) - - time.sleep(2) - snippets.enable_alert_policies(pochan.project_name, False, "name='{}'".format(pochan.alert_policy.name)) - out, _ = capsys.readouterr() - assert ( - "Disabled {}".format(pochan.project_name) in out - or "{} is already disabled".format(pochan.alert_policy.name) in out - ) - - -@pytest.mark.flaky(rerun_filter=delay_on_aborted, max_runs=5) -def test_replace_channels(capsys, pochan): - alert_policy_id = pochan.alert_policy.name.split("/")[-1] - notification_channel_id = pochan.notification_channel.name.split("/")[-1] - - # This sleep call is for mitigating the following error: - # "409 Too many concurrent edits to the project configuration. - # Please try again." - # Having multiple projects will void this `sleep()` call. - # See also #3310 - time.sleep(2) - snippets.replace_notification_channels( - pochan.project_name, alert_policy_id, [notification_channel_id] - ) - out, _ = capsys.readouterr() - assert "Updated {0}".format(pochan.alert_policy.name) in out - - -@pytest.mark.flaky(rerun_filter=delay_on_aborted, max_runs=5) -def test_backup_and_restore(capsys, pochan): - # These sleep calls are for mitigating the following error: - # "409 Too many concurrent edits to the project configuration. - # Please try again." - # Having multiple projects will void this `sleep()` call. 
- # See also #3310 - time.sleep(2) - snippets.backup(pochan.project_name, "backup.json") - out, _ = capsys.readouterr() - - time.sleep(2) - snippets.restore(pochan.project_name, "backup.json") - out, _ = capsys.readouterr() - assert "Updated {0}".format(pochan.alert_policy.name) in out - assert ( - "Updating channel {0}".format(pochan.notification_channel.display_name) in out - ) - - -@pytest.mark.flaky(rerun_filter=delay_on_aborted, max_runs=5) -def test_delete_channels(capsys, pochan): - notification_channel_id = pochan.notification_channel.name.split("/")[-1] - - # This sleep call is for mitigating the following error: - # "409 Too many concurrent edits to the project configuration. - # Please try again." - # Having multiple projects will void these `sleep()` calls. - # See also #3310 - time.sleep(2) - snippets.delete_notification_channels( - pochan.project_name, [notification_channel_id], force=True - ) - out, _ = capsys.readouterr() - assert "{0} deleted".format(notification_channel_id) in out - pochan.notification_channel.name = "" # So teardown is not tried diff --git a/samples/snippets/v3/alerts-client/test_alert_policy.json b/samples/snippets/v3/alerts-client/test_alert_policy.json deleted file mode 100644 index d728949f..00000000 --- a/samples/snippets/v3/alerts-client/test_alert_policy.json +++ /dev/null @@ -1,31 +0,0 @@ -{ - "displayName": "test_alert_policy.json", - "combiner": "OR", - "conditions": [ - { - "conditionThreshold": { - "filter": "metric.label.state=\"blocked\" AND metric.type=\"agent.googleapis.com/processes/count_by_state\" AND resource.type=\"gce_instance\"", - "comparison": "COMPARISON_GT", - "thresholdValue": 100, - "duration": "900s", - "trigger": { - "percent": 0 - }, - "aggregations": [ - { - "alignmentPeriod": "60s", - "perSeriesAligner": "ALIGN_MEAN", - "crossSeriesReducer": "REDUCE_MEAN", - "groupByFields": [ - "project", - "resource.label.instance_id", - "resource.label.zone" - ] - } - ] - }, - "displayName": "test_alert_policy.json" - } - ], - "enabled": false -} \ No newline at end of file diff --git a/samples/snippets/v3/alerts-client/test_notification_channel.json b/samples/snippets/v3/alerts-client/test_notification_channel.json deleted file mode 100644 index 6a0d53c0..00000000 --- a/samples/snippets/v3/alerts-client/test_notification_channel.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "type": "email", - "displayName": "Email joe.", - "description": "test_notification_channel.json", - "labels": { - "email_address": "joe@example.com" - }, - "userLabels": { - "office": "california_westcoast_usa", - "division": "fulfillment", - "role": "operations", - "level": "5" - }, - "enabled": true -} \ No newline at end of file diff --git a/samples/snippets/v3/cloud-client/README.rst b/samples/snippets/v3/cloud-client/README.rst deleted file mode 100644 index 280f9c4e..00000000 --- a/samples/snippets/v3/cloud-client/README.rst +++ /dev/null @@ -1,147 +0,0 @@ -.. This file is automatically generated. Do not edit this file directly. - -Google Stackdriver Monitoring API Python Samples -=============================================================================== - -.. image:: https://gstatic.com/cloudssh/images/open-btn.png - :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=monitoring/api/v3/cloud-client/README.rst - - -This directory contains samples for Google Stackdriver Monitoring API. 
Stackdriver Monitoring collects metrics, events, and metadata from Google Cloud Platform, Amazon Web Services (AWS), hosted uptime probes, application instrumentation, and a variety of common application components including Cassandra, Nginx, Apache Web Server, Elasticsearch - and many others. Stackdriver ingests that data and generates insights - via dashboards, charts, and alerts. - - - - -.. _Google Stackdriver Monitoring API: https://cloud.google.com/monitoring/docs/ - -To run the sample, you need to enable the API at: https://console.cloud.google.com/apis/library/monitoring.googleapis.com - -To run the sample, you need to have `Monitoring Admin` role. - - -Please visit [the Cloud Console UI of this API](https://console.cloud.google.com/monitoring) and create a new Workspace with the same name of your Cloud project. - - -Setup -------------------------------------------------------------------------------- - - -Authentication -++++++++++++++ - -This sample requires you to have authentication setup. Refer to the -`Authentication Getting Started Guide`_ for instructions on setting up -credentials for applications. - -.. _Authentication Getting Started Guide: - https://cloud.google.com/docs/authentication/getting-started - -Install Dependencies -++++++++++++++++++++ - -#. Clone python-docs-samples and change directory to the sample directory you want to use. - - .. code-block:: bash - - $ git clone https://github.com/GoogleCloudPlatform/python-docs-samples.git - -#. Install `pip`_ and `virtualenv`_ if you do not already have them. You may want to refer to the `Python Development Environment Setup Guide`_ for Google Cloud Platform for instructions. - - .. _Python Development Environment Setup Guide: - https://cloud.google.com/python/setup - -#. Create a virtualenv. Samples are compatible with Python 2.7 and 3.4+. - - .. code-block:: bash - - $ virtualenv env - $ source env/bin/activate - -#. Install the dependencies needed to run the samples. - - .. code-block:: bash - - $ pip install -r requirements.txt - -.. _pip: https://pip.pypa.io/ -.. _virtualenv: https://virtualenv.pypa.io/ - -Samples -------------------------------------------------------------------------------- - -Quickstart -+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ - -.. image:: https://gstatic.com/cloudssh/images/open-btn.png - :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=monitoring/api/v3/cloud-client/quickstart.py,monitoring/api/v3/cloud-client/README.rst - - - - -To run this sample: - -.. code-block:: bash - - $ python quickstart.py - - -Snippets -+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ - -.. image:: https://gstatic.com/cloudssh/images/open-btn.png - :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=monitoring/api/v3/cloud-client/snippets.py,monitoring/api/v3/cloud-client/README.rst - - - - -To run this sample: - -.. code-block:: bash - - $ python snippets.py - - usage: snippets.py [-h] - {create-metric-descriptor,list-metric-descriptors,get-metric-descriptor,delete-metric-descriptor,list-resources,get-resource,write-time-series,list-time-series,list-time-series-header,list-time-series-reduce,list-time-series-aggregate} - ... - - Demonstrates Monitoring API operations. 
- - positional arguments: - {create-metric-descriptor,list-metric-descriptors,get-metric-descriptor,delete-metric-descriptor,list-resources,get-resource,write-time-series,list-time-series,list-time-series-header,list-time-series-reduce,list-time-series-aggregate} - create-metric-descriptor - list-metric-descriptors - get-metric-descriptor - delete-metric-descriptor - list-resources - get-resource - write-time-series - list-time-series - list-time-series-header - list-time-series-reduce - list-time-series-aggregate - - optional arguments: - -h, --help show this help message and exit - - - - - -The client library -------------------------------------------------------------------------------- - -This sample uses the `Google Cloud Client Library for Python`_. -You can read the documentation for more details on API usage and use GitHub -to `browse the source`_ and `report issues`_. - -.. _Google Cloud Client Library for Python: - https://googlecloudplatform.github.io/google-cloud-python/ -.. _browse the source: - https://github.com/GoogleCloudPlatform/google-cloud-python -.. _report issues: - https://github.com/GoogleCloudPlatform/google-cloud-python/issues - - -.. _Google Cloud SDK: https://cloud.google.com/sdk/ \ No newline at end of file diff --git a/samples/snippets/v3/cloud-client/README.rst.in b/samples/snippets/v3/cloud-client/README.rst.in deleted file mode 100644 index 0ab6b225..00000000 --- a/samples/snippets/v3/cloud-client/README.rst.in +++ /dev/null @@ -1,35 +0,0 @@ -# This file is used to generate README.rst - -product: - name: Google Stackdriver Monitoring API - short_name: Stackdriver Monitoring API - url: https://cloud.google.com/monitoring/docs/ - description: > - Stackdriver Monitoring collects metrics, events, and metadata from Google - Cloud Platform, Amazon Web Services (AWS), hosted uptime probes, - application instrumentation, and a variety of common application - components including Cassandra, Nginx, Apache Web Server, Elasticsearch - and many others. Stackdriver ingests that data and generates insights - via dashboards, charts, and alerts. - -required_api_url: https://console.cloud.google.com/apis/library/monitoring.googleapis.com -required_role: Monitoring Admin -other_required_steps: > - Please visit [the Cloud Console UI of this - API](https://console.cloud.google.com/monitoring) and create a new - Workspace with the same name of your Cloud project. - -setup: -- auth -- install_deps - -samples: -- name: Quickstart - file: quickstart.py -- name: Snippets - file: snippets.py - show_help: true - -cloud_client_library: true - -folder: monitoring/api/v3/cloud-client diff --git a/samples/snippets/v3/cloud-client/noxfile.py b/samples/snippets/v3/cloud-client/noxfile.py deleted file mode 100644 index 0398d72f..00000000 --- a/samples/snippets/v3/cloud-client/noxfile.py +++ /dev/null @@ -1,312 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
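The noxfile.py being removed next is identical to the alerts-client copy shown earlier in this diff: it defines a default TEST_CONFIG and then merges an optional, per-sample noxfile_config.py into it with a plain dict.update(). As a minimal sketch of that override hook (the key names come from the noxfile itself; the specific values are hypothetical and not taken from this repository):

    # noxfile_config.py -- hypothetical override, dropped next to a sample's noxfile.py.
    # Only the keys listed here replace the defaults in TEST_CONFIG; every other
    # setting keeps the value hard-coded in the generated noxfile.py.
    TEST_CONFIG_OVERRIDE = {
        "ignored_versions": ["3.7"],  # skip this interpreter in the py session
        "gcloud_project_env": "BUILD_SPECIFIC_GCLOUD_PROJECT",  # env var holding the project id
        "envs": {"SAMPLE_SPECIFIC_FLAG": "1"},  # extra env vars passed through to pytest
    }

Because the merge is a shallow update, an "envs" mapping supplied here replaces the default dictionary outright rather than being combined with it.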
- -from __future__ import print_function - -import glob -import os -from pathlib import Path -import sys -from typing import Callable, Dict, List, Optional - -import nox - - -# WARNING - WARNING - WARNING - WARNING - WARNING -# WARNING - WARNING - WARNING - WARNING - WARNING -# DO NOT EDIT THIS FILE EVER! -# WARNING - WARNING - WARNING - WARNING - WARNING -# WARNING - WARNING - WARNING - WARNING - WARNING - -BLACK_VERSION = "black==22.3.0" -ISORT_VERSION = "isort==5.10.1" - -# Copy `noxfile_config.py` to your directory and modify it instead. - -# `TEST_CONFIG` dict is a configuration hook that allows users to -# modify the test configurations. The values here should be in sync -# with `noxfile_config.py`. Users will copy `noxfile_config.py` into -# their directory and modify it. - -TEST_CONFIG = { - # You can opt out from the test for specific Python versions. - "ignored_versions": [], - # Old samples are opted out of enforcing Python type hints - # All new samples should feature them - "enforce_type_hints": False, - # An envvar key for determining the project id to use. Change it - # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a - # build specific Cloud project. You can also use your own string - # to use your own Cloud project. - "gcloud_project_env": "GOOGLE_CLOUD_PROJECT", - # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT', - # If you need to use a specific version of pip, - # change pip_version_override to the string representation - # of the version number, for example, "20.2.4" - "pip_version_override": None, - # A dictionary you want to inject into your test. Don't put any - # secrets here. These values will override predefined values. - "envs": {}, -} - - -try: - # Ensure we can import noxfile_config in the project's directory. - sys.path.append(".") - from noxfile_config import TEST_CONFIG_OVERRIDE -except ImportError as e: - print("No user noxfile_config found: detail: {}".format(e)) - TEST_CONFIG_OVERRIDE = {} - -# Update the TEST_CONFIG with the user supplied values. -TEST_CONFIG.update(TEST_CONFIG_OVERRIDE) - - -def get_pytest_env_vars() -> Dict[str, str]: - """Returns a dict for pytest invocation.""" - ret = {} - - # Override the GCLOUD_PROJECT and the alias. - env_key = TEST_CONFIG["gcloud_project_env"] - # This should error out if not set. - ret["GOOGLE_CLOUD_PROJECT"] = os.environ[env_key] - - # Apply user supplied envs. - ret.update(TEST_CONFIG["envs"]) - return ret - - -# DO NOT EDIT - automatically generated. -# All versions used to test samples. -ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10"] - -# Any default versions that should be ignored. -IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] - -TESTED_VERSIONS = sorted([v for v in ALL_VERSIONS if v not in IGNORED_VERSIONS]) - -INSTALL_LIBRARY_FROM_SOURCE = os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False) in ( - "True", - "true", -) - -# Error if a python version is missing -nox.options.error_on_missing_interpreters = True - -# -# Style Checks -# - - -def _determine_local_import_names(start_dir: str) -> List[str]: - """Determines all import names that should be considered "local". - - This is used when running the linter to insure that import order is - properly checked. - """ - file_ext_pairs = [os.path.splitext(path) for path in os.listdir(start_dir)] - return [ - basename - for basename, extension in file_ext_pairs - if extension == ".py" - or os.path.isdir(os.path.join(start_dir, basename)) - and basename not in ("__pycache__") - ] - - -# Linting with flake8. 
-# -# We ignore the following rules: -# E203: whitespace before ‘:’ -# E266: too many leading ‘#’ for block comment -# E501: line too long -# I202: Additional newline in a section of imports -# -# We also need to specify the rules which are ignored by default: -# ['E226', 'W504', 'E126', 'E123', 'W503', 'E24', 'E704', 'E121'] -FLAKE8_COMMON_ARGS = [ - "--show-source", - "--builtin=gettext", - "--max-complexity=20", - "--import-order-style=google", - "--exclude=.nox,.cache,env,lib,generated_pb2,*_pb2.py,*_pb2_grpc.py", - "--ignore=E121,E123,E126,E203,E226,E24,E266,E501,E704,W503,W504,I202", - "--max-line-length=88", -] - - -@nox.session -def lint(session: nox.sessions.Session) -> None: - if not TEST_CONFIG["enforce_type_hints"]: - session.install("flake8", "flake8-import-order") - else: - session.install("flake8", "flake8-import-order", "flake8-annotations") - - local_names = _determine_local_import_names(".") - args = FLAKE8_COMMON_ARGS + [ - "--application-import-names", - ",".join(local_names), - ".", - ] - session.run("flake8", *args) - - -# -# Black -# - - -@nox.session -def blacken(session: nox.sessions.Session) -> None: - """Run black. Format code to uniform standard.""" - session.install(BLACK_VERSION) - python_files = [path for path in os.listdir(".") if path.endswith(".py")] - - session.run("black", *python_files) - - -# -# format = isort + black -# - -@nox.session -def format(session: nox.sessions.Session) -> None: - """ - Run isort to sort imports. Then run black - to format code to uniform standard. - """ - session.install(BLACK_VERSION, ISORT_VERSION) - python_files = [path for path in os.listdir(".") if path.endswith(".py")] - - # Use the --fss option to sort imports using strict alphabetical order. - # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections - session.run("isort", "--fss", *python_files) - session.run("black", *python_files) - - -# -# Sample Tests -# - - -PYTEST_COMMON_ARGS = ["--junitxml=sponge_log.xml"] - - -def _session_tests( - session: nox.sessions.Session, post_install: Callable = None -) -> None: - # check for presence of tests - test_list = glob.glob("**/*_test.py", recursive=True) + glob.glob("**/test_*.py", recursive=True) - test_list.extend(glob.glob("**/tests", recursive=True)) - - if len(test_list) == 0: - print("No tests found, skipping directory.") - return - - if TEST_CONFIG["pip_version_override"]: - pip_version = TEST_CONFIG["pip_version_override"] - session.install(f"pip=={pip_version}") - """Runs py.test for a particular project.""" - concurrent_args = [] - if os.path.exists("requirements.txt"): - if os.path.exists("constraints.txt"): - session.install("-r", "requirements.txt", "-c", "constraints.txt") - else: - session.install("-r", "requirements.txt") - with open("requirements.txt") as rfile: - packages = rfile.read() - - if os.path.exists("requirements-test.txt"): - if os.path.exists("constraints-test.txt"): - session.install( - "-r", "requirements-test.txt", "-c", "constraints-test.txt" - ) - else: - session.install("-r", "requirements-test.txt") - with open("requirements-test.txt") as rtfile: - packages += rtfile.read() - - if INSTALL_LIBRARY_FROM_SOURCE: - session.install("-e", _get_repo_root()) - - if post_install: - post_install(session) - - if "pytest-parallel" in packages: - concurrent_args.extend(['--workers', 'auto', '--tests-per-worker', 'auto']) - elif "pytest-xdist" in packages: - concurrent_args.extend(['-n', 'auto']) - - session.run( - "pytest", - *(PYTEST_COMMON_ARGS + session.posargs 
+ concurrent_args), - # Pytest will return 5 when no tests are collected. This can happen - # on travis where slow and flaky tests are excluded. - # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html - success_codes=[0, 5], - env=get_pytest_env_vars(), - ) - - -@nox.session(python=ALL_VERSIONS) -def py(session: nox.sessions.Session) -> None: - """Runs py.test for a sample using the specified version of Python.""" - if session.python in TESTED_VERSIONS: - _session_tests(session) - else: - session.skip( - "SKIPPED: {} tests are disabled for this sample.".format(session.python) - ) - - -# -# Readmegen -# - - -def _get_repo_root() -> Optional[str]: - """ Returns the root folder of the project. """ - # Get root of this repository. Assume we don't have directories nested deeper than 10 items. - p = Path(os.getcwd()) - for i in range(10): - if p is None: - break - if Path(p / ".git").exists(): - return str(p) - # .git is not available in repos cloned via Cloud Build - # setup.py is always in the library's root, so use that instead - # https://github.com/googleapis/synthtool/issues/792 - if Path(p / "setup.py").exists(): - return str(p) - p = p.parent - raise Exception("Unable to detect repository root.") - - -GENERATED_READMES = sorted([x for x in Path(".").rglob("*.rst.in")]) - - -@nox.session -@nox.parametrize("path", GENERATED_READMES) -def readmegen(session: nox.sessions.Session, path: str) -> None: - """(Re-)generates the readme for a sample.""" - session.install("jinja2", "pyyaml") - dir_ = os.path.dirname(path) - - if os.path.exists(os.path.join(dir_, "requirements.txt")): - session.install("-r", os.path.join(dir_, "requirements.txt")) - - in_file = os.path.join(dir_, "README.rst.in") - session.run( - "python", _get_repo_root() + "/scripts/readme-gen/readme_gen.py", in_file - ) diff --git a/samples/snippets/v3/cloud-client/noxfile_config.py b/samples/snippets/v3/cloud-client/noxfile_config.py deleted file mode 100644 index 664c5830..00000000 --- a/samples/snippets/v3/cloud-client/noxfile_config.py +++ /dev/null @@ -1,42 +0,0 @@ -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Default TEST_CONFIG_OVERRIDE for python repos. - -# You can copy this file into your directory, then it will be imported from -# the noxfile.py. - -# The source of truth: -# https://github.com/GoogleCloudPlatform/python-docs-samples/blob/master/noxfile_config.py - -TEST_CONFIG_OVERRIDE = { - # You can opt out from the test for specific Python versions. - 'ignored_versions': ["2.7"], - - # Declare optional test sessions you want to opt-in. Currently we - # have the following optional test sessions: - # 'cloud_run' # Test session for Cloud Run application. - 'opt_in_sessions': [], - - # An envvar key for determining the project id to use. Change it - # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a - # build specific Cloud project. You can also use your own string - # to use your own Cloud project.
- # 'gcloud_project_env': 'GOOGLE_CLOUD_PROJECT', - 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT', - - # A dictionary you want to inject into your test. Don't put any - # secrets here. These values will override predefined values. - 'envs': {}, -} diff --git a/samples/snippets/v3/cloud-client/quickstart.py b/samples/snippets/v3/cloud-client/quickstart.py deleted file mode 100644 index 78088dba..00000000 --- a/samples/snippets/v3/cloud-client/quickstart.py +++ /dev/null @@ -1,45 +0,0 @@ -# Copyright 2017 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -def run_quickstart(project=""): - # [START monitoring_quickstart] - from google.cloud import monitoring_v3 - - import time - - client = monitoring_v3.MetricServiceClient() - # project = 'my-project' # TODO: Update to your project ID. - project_name = f"projects/{project}" - - series = monitoring_v3.TimeSeries() - series.metric.type = "custom.googleapis.com/my_metric" - series.resource.type = "gce_instance" - series.resource.labels["instance_id"] = "1234567890123456789" - series.resource.labels["zone"] = "us-central1-f" - now = time.time() - seconds = int(now) - nanos = int((now - seconds) * 10 ** 9) - interval = monitoring_v3.TimeInterval( - {"end_time": {"seconds": seconds, "nanos": nanos}} - ) - point = monitoring_v3.Point({"interval": interval, "value": {"double_value": 3.14}}) - series.points = [point] - client.create_time_series(request={"name": project_name, "time_series": [series]}) - print("Successfully wrote time series.") - # [END monitoring_quickstart] - - -if __name__ == "__main__": - run_quickstart() diff --git a/samples/snippets/v3/cloud-client/requirements-test.txt b/samples/snippets/v3/cloud-client/requirements-test.txt deleted file mode 100644 index 46606d37..00000000 --- a/samples/snippets/v3/cloud-client/requirements-test.txt +++ /dev/null @@ -1,3 +0,0 @@ -backoff==2.2.1 -pytest==7.1.3 -mock==4.0.3 diff --git a/samples/snippets/v3/cloud-client/requirements.txt b/samples/snippets/v3/cloud-client/requirements.txt deleted file mode 100644 index 7d155e14..00000000 --- a/samples/snippets/v3/cloud-client/requirements.txt +++ /dev/null @@ -1 +0,0 @@ -google-cloud-monitoring==2.11.2 diff --git a/samples/snippets/v3/cloud-client/snippets.py b/samples/snippets/v3/cloud-client/snippets.py deleted file mode 100644 index aa9eae62..00000000 --- a/samples/snippets/v3/cloud-client/snippets.py +++ /dev/null @@ -1,356 +0,0 @@ -# Copyright 2017 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
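The quickstart.py removed just above defaults to an empty project string when it is executed directly, so its TODO comment is load-bearing: with no argument, create_time_series is called with the invalid resource name "projects/". A small driver along the following lines (hypothetical, not part of the deleted samples) makes the dependency on the GOOGLE_CLOUD_PROJECT environment variable explicit, mirroring how the snippets module below derives PROJECT_ID:

    # run_quickstart_from_env.py -- hypothetical driver for the quickstart.py shown above.
    import os

    from quickstart import run_quickstart  # the deleted sample module in this diff

    project = os.environ.get("GOOGLE_CLOUD_PROJECT")
    if not project:
        raise SystemExit(
            "Set GOOGLE_CLOUD_PROJECT to a project that has the Monitoring API enabled."
        )
    run_quickstart(project=project)

As with the other samples, this assumes the dependency from the sample's requirements.txt (google-cloud-monitoring) is installed in the current environment.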
- -import argparse -import os -import pprint -import time -import uuid - -PROJECT_ID = os.environ["GOOGLE_CLOUD_PROJECT"] - - -def create_metric_descriptor(project_id): - # [START monitoring_create_metric] - from google.api import label_pb2 as ga_label - from google.api import metric_pb2 as ga_metric - from google.cloud import monitoring_v3 - - client = monitoring_v3.MetricServiceClient() - project_name = f"projects/{project_id}" - descriptor = ga_metric.MetricDescriptor() - descriptor.type = "custom.googleapis.com/my_metric" + str(uuid.uuid4()) - descriptor.metric_kind = ga_metric.MetricDescriptor.MetricKind.GAUGE - descriptor.value_type = ga_metric.MetricDescriptor.ValueType.DOUBLE - descriptor.description = "This is a simple example of a custom metric." - - labels = ga_label.LabelDescriptor() - labels.key = "TestLabel" - labels.value_type = ga_label.LabelDescriptor.ValueType.STRING - labels.description = "This is a test label" - descriptor.labels.append(labels) - - descriptor = client.create_metric_descriptor( - name=project_name, metric_descriptor=descriptor - ) - print("Created {}.".format(descriptor.name)) - # [END monitoring_create_metric] - - -def delete_metric_descriptor(descriptor_name): - # [START monitoring_delete_metric] - from google.cloud import monitoring_v3 - - client = monitoring_v3.MetricServiceClient() - client.delete_metric_descriptor(name=descriptor_name) - print("Deleted metric descriptor {}.".format(descriptor_name)) - # [END monitoring_delete_metric] - - -def write_time_series(project_id): - # [START monitoring_write_timeseries] - from google.cloud import monitoring_v3 - - client = monitoring_v3.MetricServiceClient() - project_name = f"projects/{project_id}" - - series = monitoring_v3.TimeSeries() - series.metric.type = "custom.googleapis.com/my_metric" + str(uuid.uuid4()) - series.resource.type = "gce_instance" - series.resource.labels["instance_id"] = "1234567890123456789" - series.resource.labels["zone"] = "us-central1-f" - series.metric.labels["TestLabel"] = "My Label Data" - now = time.time() - seconds = int(now) - nanos = int((now - seconds) * 10 ** 9) - interval = monitoring_v3.TimeInterval( - {"end_time": {"seconds": seconds, "nanos": nanos}} - ) - point = monitoring_v3.Point({"interval": interval, "value": {"double_value": 3.14}}) - series.points = [point] - client.create_time_series(name=project_name, time_series=[series]) - # [END monitoring_write_timeseries] - - -def list_time_series(project_id): - # [START monitoring_read_timeseries_simple] - from google.cloud import monitoring_v3 - - client = monitoring_v3.MetricServiceClient() - project_name = f"projects/{project_id}" - interval = monitoring_v3.TimeInterval() - - now = time.time() - seconds = int(now) - nanos = int((now - seconds) * 10 ** 9) - interval = monitoring_v3.TimeInterval( - { - "end_time": {"seconds": seconds, "nanos": nanos}, - "start_time": {"seconds": (seconds - 1200), "nanos": nanos}, - } - ) - - results = client.list_time_series( - request={ - "name": project_name, - "filter": 'metric.type = "compute.googleapis.com/instance/cpu/utilization"', - "interval": interval, - "view": monitoring_v3.ListTimeSeriesRequest.TimeSeriesView.FULL, - } - ) - for result in results: - print(result) - # [END monitoring_read_timeseries_simple] - - -def list_time_series_header(project_id): - # [START monitoring_read_timeseries_fields] - from google.cloud import monitoring_v3 - - client = monitoring_v3.MetricServiceClient() - project_name = f"projects/{project_id}" - now = time.time() - seconds = int(now) - 
nanos = int((now - seconds) * 10 ** 9) - interval = monitoring_v3.TimeInterval( - { - "end_time": {"seconds": seconds, "nanos": nanos}, - "start_time": {"seconds": (seconds - 1200), "nanos": nanos}, - } - ) - results = client.list_time_series( - request={ - "name": project_name, - "filter": 'metric.type = "compute.googleapis.com/instance/cpu/utilization"', - "interval": interval, - "view": monitoring_v3.ListTimeSeriesRequest.TimeSeriesView.HEADERS, - } - ) - for result in results: - print(result) - # [END monitoring_read_timeseries_fields] - - -def list_time_series_aggregate(project_id): - # [START monitoring_read_timeseries_align] - from google.cloud import monitoring_v3 - - client = monitoring_v3.MetricServiceClient() - project_name = f"projects/{project_id}" - - now = time.time() - seconds = int(now) - nanos = int((now - seconds) * 10 ** 9) - interval = monitoring_v3.TimeInterval( - { - "end_time": {"seconds": seconds, "nanos": nanos}, - "start_time": {"seconds": (seconds - 3600), "nanos": nanos}, - } - ) - aggregation = monitoring_v3.Aggregation( - { - "alignment_period": {"seconds": 1200}, # 20 minutes - "per_series_aligner": monitoring_v3.Aggregation.Aligner.ALIGN_MEAN, - } - ) - - results = client.list_time_series( - request={ - "name": project_name, - "filter": 'metric.type = "compute.googleapis.com/instance/cpu/utilization"', - "interval": interval, - "view": monitoring_v3.ListTimeSeriesRequest.TimeSeriesView.FULL, - "aggregation": aggregation, - } - ) - for result in results: - print(result) - # [END monitoring_read_timeseries_align] - - -def list_time_series_reduce(project_id): - # [START monitoring_read_timeseries_reduce] - from google.cloud import monitoring_v3 - - client = monitoring_v3.MetricServiceClient() - project_name = f"projects/{project_id}" - - now = time.time() - seconds = int(now) - nanos = int((now - seconds) * 10 ** 9) - interval = monitoring_v3.TimeInterval( - { - "end_time": {"seconds": seconds, "nanos": nanos}, - "start_time": {"seconds": (seconds - 3600), "nanos": nanos}, - } - ) - aggregation = monitoring_v3.Aggregation( - { - "alignment_period": {"seconds": 1200}, # 20 minutes - "per_series_aligner": monitoring_v3.Aggregation.Aligner.ALIGN_MEAN, - "cross_series_reducer": monitoring_v3.Aggregation.Reducer.REDUCE_MEAN, - "group_by_fields": ["resource.zone"], - } - ) - - results = client.list_time_series( - request={ - "name": project_name, - "filter": 'metric.type = "compute.googleapis.com/instance/cpu/utilization"', - "interval": interval, - "view": monitoring_v3.ListTimeSeriesRequest.TimeSeriesView.FULL, - "aggregation": aggregation, - } - ) - for result in results: - print(result) - # [END monitoring_read_timeseries_reduce] - - -def list_metric_descriptors(project_id): - # [START monitoring_list_descriptors] - from google.cloud import monitoring_v3 - - client = monitoring_v3.MetricServiceClient() - project_name = f"projects/{project_id}" - for descriptor in client.list_metric_descriptors(name=project_name): - print(descriptor.type) - # [END monitoring_list_descriptors] - - -def list_monitored_resources(project_id): - # [START monitoring_list_resources] - from google.cloud import monitoring_v3 - - client = monitoring_v3.MetricServiceClient() - project_name = f"projects/{project_id}" - resource_descriptors = client.list_monitored_resource_descriptors(name=project_name) - for descriptor in resource_descriptors: - print(descriptor.type) - # [END monitoring_list_resources] - - -def get_monitored_resource_descriptor(project_id, resource_type_name): - # [START 
-    from google.cloud import monitoring_v3
-
-    client = monitoring_v3.MetricServiceClient()
-    resource_path = (
-        f"projects/{project_id}/monitoredResourceDescriptors/{resource_type_name}"
-    )
-    pprint.pprint(client.get_monitored_resource_descriptor(name=resource_path))
-    # [END monitoring_get_resource]
-
-
-def get_metric_descriptor(metric_name):
-    # [START monitoring_get_descriptor]
-    from google.cloud import monitoring_v3
-
-    client = monitoring_v3.MetricServiceClient()
-    descriptor = client.get_metric_descriptor(name=metric_name)
-    pprint.pprint(descriptor)
-    # [END monitoring_get_descriptor]
-
-
-if __name__ == "__main__":
-    parser = argparse.ArgumentParser(
-        description="Demonstrates Monitoring API operations."
-    )
-
-    subparsers = parser.add_subparsers(dest="command")
-
-    create_metric_descriptor_parser = subparsers.add_parser(
-        "create-metric-descriptor", help=create_metric_descriptor.__doc__
-    )
-
-    list_metric_descriptor_parser = subparsers.add_parser(
-        "list-metric-descriptors", help=list_metric_descriptors.__doc__
-    )
-
-    get_metric_descriptor_parser = subparsers.add_parser(
-        "get-metric-descriptor", help=get_metric_descriptor.__doc__
-    )
-
-    get_metric_descriptor_parser.add_argument(
-        "--metric-type-name",
-        help="The metric type of the metric descriptor to see details about.",
-        required=True,
-    )
-
-    delete_metric_descriptor_parser = subparsers.add_parser(
-        "delete-metric-descriptor", help=list_metric_descriptors.__doc__
-    )
-
-    delete_metric_descriptor_parser.add_argument(
-        "--metric-descriptor-name", help="Metric descriptor to delete", required=True
-    )
-
-    list_resources_parser = subparsers.add_parser(
-        "list-resources", help=list_monitored_resources.__doc__
-    )
-
-    get_resource_parser = subparsers.add_parser(
-        "get-resource", help=get_monitored_resource_descriptor.__doc__
-    )
-
-    get_resource_parser.add_argument(
-        "--resource-type-name",
-        help="Monitored resource to view more information about.",
-        required=True,
-    )
-
-    write_time_series_parser = subparsers.add_parser(
-        "write-time-series", help=write_time_series.__doc__
-    )
-
-    list_time_series_parser = subparsers.add_parser(
-        "list-time-series", help=list_time_series.__doc__
-    )
-
-    list_time_series_header_parser = subparsers.add_parser(
-        "list-time-series-header", help=list_time_series_header.__doc__
-    )
-
-    read_time_series_reduce = subparsers.add_parser(
-        "list-time-series-reduce", help=list_time_series_reduce.__doc__
-    )
-
-    read_time_series_aggregate = subparsers.add_parser(
-        "list-time-series-aggregate", help=list_time_series_aggregate.__doc__
-    )
-
-    args = parser.parse_args()
-
-    if args.command == "create-metric-descriptor":
-        create_metric_descriptor(PROJECT_ID)
-    if args.command == "list-metric-descriptors":
-        list_metric_descriptors(PROJECT_ID)
-    if args.command == "get-metric-descriptor":
-        get_metric_descriptor(args.metric_type_name)
-    if args.command == "delete-metric-descriptor":
-        delete_metric_descriptor(args.metric_descriptor_name)
-    if args.command == "list-resources":
-        list_monitored_resources(PROJECT_ID)
-    if args.command == "get-resource":
-        get_monitored_resource_descriptor(PROJECT_ID, args.resource_type_name)
-    if args.command == "write-time-series":
-        write_time_series(PROJECT_ID)
-    if args.command == "list-time-series":
-        list_time_series(PROJECT_ID)
-    if args.command == "list-time-series-header":
-        list_time_series_header(PROJECT_ID)
-    if args.command == "list-time-series-reduce":
-        list_time_series_reduce(PROJECT_ID)
-    if args.command == "list-time-series-aggregate":
"list-time-series-aggregate": - list_time_series_aggregate(PROJECT_ID) diff --git a/samples/snippets/v3/cloud-client/snippets_test.py b/samples/snippets/v3/cloud-client/snippets_test.py deleted file mode 100644 index d6c7d07a..00000000 --- a/samples/snippets/v3/cloud-client/snippets_test.py +++ /dev/null @@ -1,115 +0,0 @@ -# Copyright 2017 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import os -import re - -import backoff -from google.api_core.exceptions import InternalServerError -from google.api_core.exceptions import NotFound -import pytest - -import snippets - - -PROJECT_ID = os.environ["GOOGLE_CLOUD_PROJECT"] - - -@pytest.fixture(scope="function") -def custom_metric_descriptor(capsys): - snippets.create_metric_descriptor(PROJECT_ID) - out, _ = capsys.readouterr() - match = re.search(r"Created (.*)\.", out) - metric_name = match.group(1) - yield metric_name - - # teardown - try: - snippets.delete_metric_descriptor(metric_name) - except NotFound: - print("Metric descriptor already deleted") - - -@pytest.fixture(scope="module") -def write_time_series(): - @backoff.on_exception(backoff.expo, InternalServerError, max_time=120) - def write(): - snippets.write_time_series(PROJECT_ID) - - write() - yield - - -def test_get_delete_metric_descriptor(capsys, custom_metric_descriptor): - try: - - @backoff.on_exception(backoff.expo, (AssertionError, NotFound), max_time=60) - def eventually_consistent_test(): - snippets.get_metric_descriptor(custom_metric_descriptor) - out, _ = capsys.readouterr() - assert "DOUBLE" in out - - eventually_consistent_test() - finally: - snippets.delete_metric_descriptor(custom_metric_descriptor) - out, _ = capsys.readouterr() - assert "Deleted metric" in out - - -def test_list_metric_descriptors(capsys): - snippets.list_metric_descriptors(PROJECT_ID) - out, _ = capsys.readouterr() - assert "logging.googleapis.com/byte_count" in out - - -def test_list_resources(capsys): - snippets.list_monitored_resources(PROJECT_ID) - out, _ = capsys.readouterr() - assert "pubsub_topic" in out - - -def test_get_resources(capsys): - snippets.get_monitored_resource_descriptor(PROJECT_ID, "pubsub_topic") - out, _ = capsys.readouterr() - assert "A topic in Google Cloud Pub/Sub" in out - - -def test_list_time_series(capsys, write_time_series): - snippets.list_time_series(PROJECT_ID) - out, _ = capsys.readouterr() - assert "gce_instance" in out - - -def test_list_time_series_header(capsys, write_time_series): - snippets.list_time_series_header(PROJECT_ID) - out, _ = capsys.readouterr() - assert "gce_instance" in out - - -def test_list_time_series_aggregate(capsys, write_time_series): - snippets.list_time_series_aggregate(PROJECT_ID) - out, _ = capsys.readouterr() - assert "points" in out - assert "interval" in out - assert "start_time" in out - assert "end_time" in out - - -def test_list_time_series_reduce(capsys, write_time_series): - snippets.list_time_series_reduce(PROJECT_ID) - out, _ = capsys.readouterr() - assert "points" in out - assert "interval" in out - assert 
"start_time" in out - assert "end_time" in out diff --git a/samples/snippets/v3/uptime-check-client/README.rst b/samples/snippets/v3/uptime-check-client/README.rst deleted file mode 100644 index 30046bde..00000000 --- a/samples/snippets/v3/uptime-check-client/README.rst +++ /dev/null @@ -1,115 +0,0 @@ -.. This file is automatically generated. Do not edit this file directly. - -Google Stackdriver Uptime Checks API Python Samples -=============================================================================== - -.. image:: https://gstatic.com/cloudssh/images/open-btn.png - :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=monitoring/api/v3/uptime-check-client/README.rst - - -This directory contains samples for Google Stackdriver Uptime Checks API. Stackdriver Monitoring collects metrics, events, and metadata from Google Cloud Platform, Amazon Web Services (AWS), hosted uptime probes, application instrumentation, and a variety of common application components including Cassandra, Nginx, Apache Web Server, Elasticsearch and many others. Stackdriver's Uptime Checks API allows you to create, delete, and list your project's Uptime Checks. - - - - -.. _Google Stackdriver Uptime Checks API: https://cloud.google.com/monitoring/uptime-checks/management - -Setup -------------------------------------------------------------------------------- - - -Authentication -++++++++++++++ - -This sample requires you to have authentication setup. Refer to the -`Authentication Getting Started Guide`_ for instructions on setting up -credentials for applications. - -.. _Authentication Getting Started Guide: - https://cloud.google.com/docs/authentication/getting-started - -Install Dependencies -++++++++++++++++++++ - -#. Clone python-docs-samples and change directory to the sample directory you want to use. - - .. code-block:: bash - - $ git clone https://github.com/GoogleCloudPlatform/python-docs-samples.git - -#. Install `pip`_ and `virtualenv`_ if you do not already have them. You may want to refer to the `Python Development Environment Setup Guide`_ for Google Cloud Platform for instructions. - - .. _Python Development Environment Setup Guide: - https://cloud.google.com/python/setup - -#. Create a virtualenv. Samples are compatible with Python 2.7 and 3.4+. - - .. code-block:: bash - - $ virtualenv env - $ source env/bin/activate - -#. Install the dependencies needed to run the samples. - - .. code-block:: bash - - $ pip install -r requirements.txt - -.. _pip: https://pip.pypa.io/ -.. _virtualenv: https://virtualenv.pypa.io/ - -Samples -------------------------------------------------------------------------------- - -Snippets -+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ - -.. image:: https://gstatic.com/cloudssh/images/open-btn.png - :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=monitoring/api/v3/uptime-check-client/snippets.py,monitoring/api/v3/uptime-check-client/README.rst - - - - -To run this sample: - -.. code-block:: bash - - $ python snippets.py - - usage: snippets.py [-h] - {list-uptime-check-configs,list-uptime-check-ips,create-uptime-check,get-uptime-check-config,delete-uptime-check-config} - ... - - Demonstrates Uptime Check API operations. 
-
-    positional arguments:
-      {list-uptime-check-configs,list-uptime-check-ips,create-uptime-check,get-uptime-check-config,delete-uptime-check-config}
-        list-uptime-check-configs
-        list-uptime-check-ips
-        create-uptime-check
-        get-uptime-check-config
-        delete-uptime-check-config
-
-    optional arguments:
-      -h, --help            show this help message and exit
-
-
-
-
-
-The client library
--------------------------------------------------------------------------------
-
-This sample uses the `Google Cloud Client Library for Python`_.
-You can read the documentation for more details on API usage and use GitHub
-to `browse the source`_ and `report issues`_.
-
-.. _Google Cloud Client Library for Python:
-   https://googlecloudplatform.github.io/google-cloud-python/
-.. _browse the source:
-   https://github.com/GoogleCloudPlatform/google-cloud-python
-.. _report issues:
-   https://github.com/GoogleCloudPlatform/google-cloud-python/issues
-
-
-.. _Google Cloud SDK: https://cloud.google.com/sdk/
\ No newline at end of file
diff --git a/samples/snippets/v3/uptime-check-client/README.rst.in b/samples/snippets/v3/uptime-check-client/README.rst.in
deleted file mode 100644
index 1174962e..00000000
--- a/samples/snippets/v3/uptime-check-client/README.rst.in
+++ /dev/null
@@ -1,26 +0,0 @@
-# This file is used to generate README.rst
-
-product:
-  name: Google Stackdriver Uptime Checks API
-  short_name: Stackdriver Uptime Checks API
-  url: https://cloud.google.com/monitoring/uptime-checks/management
-  description: >
-    Stackdriver Monitoring collects metrics, events, and metadata from Google
-    Cloud Platform, Amazon Web Services (AWS), hosted uptime probes,
-    application instrumentation, and a variety of common application
-    components including Cassandra, Nginx, Apache Web Server, Elasticsearch
-    and many others. Stackdriver's Uptime Checks API allows you to create,
-    delete, and list your project's Uptime Checks.
-
-setup:
-- auth
-- install_deps
-
-samples:
-- name: Snippets
-  file: snippets.py
-  show_help: true
-
-cloud_client_library: true
-
-folder: monitoring/api/v3/uptime-check-client
\ No newline at end of file
diff --git a/samples/snippets/v3/uptime-check-client/noxfile.py b/samples/snippets/v3/uptime-check-client/noxfile.py
deleted file mode 100644
index 0398d72f..00000000
--- a/samples/snippets/v3/uptime-check-client/noxfile.py
+++ /dev/null
@@ -1,312 +0,0 @@
-# Copyright 2019 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from __future__ import print_function
-
-import glob
-import os
-from pathlib import Path
-import sys
-from typing import Callable, Dict, List, Optional
-
-import nox
-
-
-# WARNING - WARNING - WARNING - WARNING - WARNING
-# WARNING - WARNING - WARNING - WARNING - WARNING
-# DO NOT EDIT THIS FILE EVER!
-# WARNING - WARNING - WARNING - WARNING - WARNING
-# WARNING - WARNING - WARNING - WARNING - WARNING
-
-BLACK_VERSION = "black==22.3.0"
-ISORT_VERSION = "isort==5.10.1"
-
-# Copy `noxfile_config.py` to your directory and modify it instead.
-
-# `TEST_CONFIG` dict is a configuration hook that allows users to
-# modify the test configurations. The values here should be in sync
-# with `noxfile_config.py`. Users will copy `noxfile_config.py` into
-# their directory and modify it.
-
-TEST_CONFIG = {
-    # You can opt out from the test for specific Python versions.
-    "ignored_versions": [],
-    # Old samples are opted out of enforcing Python type hints
-    # All new samples should feature them
-    "enforce_type_hints": False,
-    # An envvar key for determining the project id to use. Change it
-    # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a
-    # build specific Cloud project. You can also use your own string
-    # to use your own Cloud project.
-    "gcloud_project_env": "GOOGLE_CLOUD_PROJECT",
-    # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT',
-    # If you need to use a specific version of pip,
-    # change pip_version_override to the string representation
-    # of the version number, for example, "20.2.4"
-    "pip_version_override": None,
-    # A dictionary you want to inject into your test. Don't put any
-    # secrets here. These values will override predefined values.
-    "envs": {},
-}
-
-
-try:
-    # Ensure we can import noxfile_config in the project's directory.
-    sys.path.append(".")
-    from noxfile_config import TEST_CONFIG_OVERRIDE
-except ImportError as e:
-    print("No user noxfile_config found: detail: {}".format(e))
-    TEST_CONFIG_OVERRIDE = {}
-
-# Update the TEST_CONFIG with the user supplied values.
-TEST_CONFIG.update(TEST_CONFIG_OVERRIDE)
-
-
-def get_pytest_env_vars() -> Dict[str, str]:
-    """Returns a dict for pytest invocation."""
-    ret = {}
-
-    # Override the GCLOUD_PROJECT and the alias.
-    env_key = TEST_CONFIG["gcloud_project_env"]
-    # This should error out if not set.
-    ret["GOOGLE_CLOUD_PROJECT"] = os.environ[env_key]
-
-    # Apply user supplied envs.
-    ret.update(TEST_CONFIG["envs"])
-    return ret
-
-
-# DO NOT EDIT - automatically generated.
-# All versions used to test samples.
-ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10"]
-
-# Any default versions that should be ignored.
-IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"]
-
-TESTED_VERSIONS = sorted([v for v in ALL_VERSIONS if v not in IGNORED_VERSIONS])
-
-INSTALL_LIBRARY_FROM_SOURCE = os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False) in (
-    "True",
-    "true",
-)
-
-# Error if a python version is missing
-nox.options.error_on_missing_interpreters = True
-
-#
-# Style Checks
-#
-
-
-def _determine_local_import_names(start_dir: str) -> List[str]:
-    """Determines all import names that should be considered "local".
-
-    This is used when running the linter to insure that import order is
-    properly checked.
-    """
-    file_ext_pairs = [os.path.splitext(path) for path in os.listdir(start_dir)]
-    return [
-        basename
-        for basename, extension in file_ext_pairs
-        if extension == ".py"
-        or os.path.isdir(os.path.join(start_dir, basename))
-        and basename not in ("__pycache__")
-    ]
-
-
-# Linting with flake8.
-#
-# We ignore the following rules:
-# E203: whitespace before ‘:’
-# E266: too many leading ‘#’ for block comment
-# E501: line too long
-# I202: Additional newline in a section of imports
-#
-# We also need to specify the rules which are ignored by default:
-# ['E226', 'W504', 'E126', 'E123', 'W503', 'E24', 'E704', 'E121']
-FLAKE8_COMMON_ARGS = [
-    "--show-source",
-    "--builtin=gettext",
-    "--max-complexity=20",
-    "--import-order-style=google",
-    "--exclude=.nox,.cache,env,lib,generated_pb2,*_pb2.py,*_pb2_grpc.py",
-    "--ignore=E121,E123,E126,E203,E226,E24,E266,E501,E704,W503,W504,I202",
-    "--max-line-length=88",
-]
-
-
-@nox.session
-def lint(session: nox.sessions.Session) -> None:
-    if not TEST_CONFIG["enforce_type_hints"]:
-        session.install("flake8", "flake8-import-order")
-    else:
-        session.install("flake8", "flake8-import-order", "flake8-annotations")
-
-    local_names = _determine_local_import_names(".")
-    args = FLAKE8_COMMON_ARGS + [
-        "--application-import-names",
-        ",".join(local_names),
-        ".",
-    ]
-    session.run("flake8", *args)
-
-
-#
-# Black
-#
-
-
-@nox.session
-def blacken(session: nox.sessions.Session) -> None:
-    """Run black. Format code to uniform standard."""
-    session.install(BLACK_VERSION)
-    python_files = [path for path in os.listdir(".") if path.endswith(".py")]
-
-    session.run("black", *python_files)
-
-
-#
-# format = isort + black
-#
-
-@nox.session
-def format(session: nox.sessions.Session) -> None:
-    """
-    Run isort to sort imports. Then run black
-    to format code to uniform standard.
-    """
-    session.install(BLACK_VERSION, ISORT_VERSION)
-    python_files = [path for path in os.listdir(".") if path.endswith(".py")]
-
-    # Use the --fss option to sort imports using strict alphabetical order.
-    # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections
-    session.run("isort", "--fss", *python_files)
-    session.run("black", *python_files)
-
-
-#
-# Sample Tests
-#
-
-
-PYTEST_COMMON_ARGS = ["--junitxml=sponge_log.xml"]
-
-
-def _session_tests(
-    session: nox.sessions.Session, post_install: Callable = None
-) -> None:
-    # check for presence of tests
-    test_list = glob.glob("**/*_test.py", recursive=True) + glob.glob("**/test_*.py", recursive=True)
-    test_list.extend(glob.glob("**/tests", recursive=True))
-
-    if len(test_list) == 0:
-        print("No tests found, skipping directory.")
-        return
-
-    if TEST_CONFIG["pip_version_override"]:
-        pip_version = TEST_CONFIG["pip_version_override"]
-        session.install(f"pip=={pip_version}")
-    """Runs py.test for a particular project."""
-    concurrent_args = []
-    if os.path.exists("requirements.txt"):
-        if os.path.exists("constraints.txt"):
-            session.install("-r", "requirements.txt", "-c", "constraints.txt")
-        else:
-            session.install("-r", "requirements.txt")
-        with open("requirements.txt") as rfile:
-            packages = rfile.read()
-
-    if os.path.exists("requirements-test.txt"):
-        if os.path.exists("constraints-test.txt"):
-            session.install(
-                "-r", "requirements-test.txt", "-c", "constraints-test.txt"
-            )
-        else:
-            session.install("-r", "requirements-test.txt")
-        with open("requirements-test.txt") as rtfile:
-            packages += rtfile.read()
-
-    if INSTALL_LIBRARY_FROM_SOURCE:
-        session.install("-e", _get_repo_root())
-
-    if post_install:
-        post_install(session)
-
-    if "pytest-parallel" in packages:
-        concurrent_args.extend(['--workers', 'auto', '--tests-per-worker', 'auto'])
-    elif "pytest-xdist" in packages:
-        concurrent_args.extend(['-n', 'auto'])
-
-    session.run(
-        "pytest",
-        *(PYTEST_COMMON_ARGS + session.posargs + concurrent_args),
-        # Pytest will return 5 when no tests are collected. This can happen
-        # on travis where slow and flaky tests are excluded.
-        # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html
-        success_codes=[0, 5],
-        env=get_pytest_env_vars(),
-    )
-
-
-@nox.session(python=ALL_VERSIONS)
-def py(session: nox.sessions.Session) -> None:
-    """Runs py.test for a sample using the specified version of Python."""
-    if session.python in TESTED_VERSIONS:
-        _session_tests(session)
-    else:
-        session.skip(
-            "SKIPPED: {} tests are disabled for this sample.".format(session.python)
-        )
-
-
-#
-# Readmegen
-#
-
-
-def _get_repo_root() -> Optional[str]:
-    """ Returns the root folder of the project. """
-    # Get root of this repository. Assume we don't have directories nested deeper than 10 items.
-    p = Path(os.getcwd())
-    for i in range(10):
-        if p is None:
-            break
-        if Path(p / ".git").exists():
-            return str(p)
-        # .git is not available in repos cloned via Cloud Build
-        # setup.py is always in the library's root, so use that instead
-        # https://github.com/googleapis/synthtool/issues/792
-        if Path(p / "setup.py").exists():
-            return str(p)
-        p = p.parent
-    raise Exception("Unable to detect repository root.")
-
-
-GENERATED_READMES = sorted([x for x in Path(".").rglob("*.rst.in")])
-
-
-@nox.session
-@nox.parametrize("path", GENERATED_READMES)
-def readmegen(session: nox.sessions.Session, path: str) -> None:
-    """(Re-)generates the readme for a sample."""
-    session.install("jinja2", "pyyaml")
-    dir_ = os.path.dirname(path)
-
-    if os.path.exists(os.path.join(dir_, "requirements.txt")):
-        session.install("-r", os.path.join(dir_, "requirements.txt"))
-
-    in_file = os.path.join(dir_, "README.rst.in")
-    session.run(
-        "python", _get_repo_root() + "/scripts/readme-gen/readme_gen.py", in_file
-    )
diff --git a/samples/snippets/v3/uptime-check-client/requirements-test.txt b/samples/snippets/v3/uptime-check-client/requirements-test.txt
deleted file mode 100644
index 3bc76cee..00000000
--- a/samples/snippets/v3/uptime-check-client/requirements-test.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-backoff==2.2.1
-pytest==7.1.3
diff --git a/samples/snippets/v3/uptime-check-client/requirements.txt b/samples/snippets/v3/uptime-check-client/requirements.txt
deleted file mode 100644
index db5d369c..00000000
--- a/samples/snippets/v3/uptime-check-client/requirements.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-google-cloud-monitoring==2.11.2
-tabulate==0.8.10
diff --git a/samples/snippets/v3/uptime-check-client/snippets.py b/samples/snippets/v3/uptime-check-client/snippets.py
deleted file mode 100644
index 0970b8e7..00000000
--- a/samples/snippets/v3/uptime-check-client/snippets.py
+++ /dev/null
@@ -1,260 +0,0 @@
-# Copyright 2018 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from __future__ import print_function
-
-import argparse
-import os
-import pprint
-
-from google.cloud import monitoring_v3
-from google.protobuf import field_mask_pb2
-import tabulate
-
-
-# [START monitoring_uptime_check_create]
-def create_uptime_check_config_get(project_name, host_name=None, display_name=None):
-    config = monitoring_v3.UptimeCheckConfig()
-    config.display_name = display_name or "New GET uptime check"
-    config.monitored_resource = {
-        "type": "uptime_url",
-        "labels": {"host": host_name or "example.com"}
-    }
-    config.http_check = {
-        "request_method": monitoring_v3.UptimeCheckConfig.HttpCheck.RequestMethod.GET,
-        "path": "/",
-        "port": 80
-    }
-    config.timeout = {"seconds": 10}
-    config.period = {"seconds": 300}
-
-    client = monitoring_v3.UptimeCheckServiceClient()
-    new_config = client.create_uptime_check_config(request={"parent": project_name, "uptime_check_config": config})
-    pprint.pprint(new_config)
-    return new_config
-
-
-def create_uptime_check_config_post(project_name, host_name=None, display_name=None):
-    config = monitoring_v3.UptimeCheckConfig()
-    config.display_name = display_name or "New POST uptime check"
-    config.monitored_resource = {
-        "type": "uptime_url",
-        "labels": {"host": host_name or "example.com"}
-    }
-    config.http_check = {
-        "request_method": monitoring_v3.UptimeCheckConfig.HttpCheck.RequestMethod.POST,
-        "content_type": monitoring_v3.UptimeCheckConfig.HttpCheck.ContentType.URL_ENCODED,
-        "body": "foo=bar".encode("utf-8"),
-        "path": "/",
-        "port": 80
-    }
-    config.timeout = {"seconds": 10}
-    config.period = {"seconds": 300}
-
-    client = monitoring_v3.UptimeCheckServiceClient()
-    new_config = client.create_uptime_check_config(request={"parent": project_name, "uptime_check_config": config})
-    pprint.pprint(new_config)
-    return new_config
-
-
-# [END monitoring_uptime_check_create]
-
-# [START monitoring_uptime_check_update]
-def update_uptime_check_config(
-    config_name, new_display_name=None, new_http_check_path=None
-):
-    client = monitoring_v3.UptimeCheckServiceClient()
-    config = client.get_uptime_check_config(request={"name": config_name})
-    field_mask = field_mask_pb2.FieldMask()
-    if new_display_name:
-        field_mask.paths.append("display_name")
-        config.display_name = new_display_name
-    if new_http_check_path:
-        field_mask.paths.append("http_check.path")
-        config.http_check.path = new_http_check_path
-    client.update_uptime_check_config(request={"uptime_check_config": config, "update_mask": field_mask})
-
-
-# [END monitoring_uptime_check_update]
-
-
-# [START monitoring_uptime_check_list_configs]
-def list_uptime_check_configs(project_name):
-    client = monitoring_v3.UptimeCheckServiceClient()
-    configs = client.list_uptime_check_configs(request={"parent": project_name})
-
-    for config in configs:
-        pprint.pprint(config)
-
-
-# [END monitoring_uptime_check_list_configs]
-
-
-# [START monitoring_uptime_check_list_ips]
-def list_uptime_check_ips():
-    client = monitoring_v3.UptimeCheckServiceClient()
-    ips = client.list_uptime_check_ips(request={})
-    print(
-        tabulate.tabulate(
-            [(ip.region, ip.location, ip.ip_address) for ip in ips],
-            ("region", "location", "ip_address"),
-        )
-    )
-
-
-# [END monitoring_uptime_check_list_ips]
-
-
-# [START monitoring_uptime_check_get]
-def get_uptime_check_config(config_name):
-    client = monitoring_v3.UptimeCheckServiceClient()
-    config = client.get_uptime_check_config(request={"name": config_name})
-    pprint.pprint(config)
-
-
-# [END monitoring_uptime_check_get]
-
-
-# [START monitoring_uptime_check_delete]
-# `config_name` is the `name` field of an UptimeCheckConfig.
-# See https://cloud.google.com/monitoring/api/ref_v3/rest/v3/projects.uptimeCheckConfigs#UptimeCheckConfig.
-def delete_uptime_check_config(config_name):
-    client = monitoring_v3.UptimeCheckServiceClient()
-    client.delete_uptime_check_config(request={"name": config_name})
-    print("Deleted ", config_name)
-
-
-# [END monitoring_uptime_check_delete]
-
-
-class MissingProjectIdError(Exception):
-    pass
-
-
-def project_id():
-    """Retreieves the project id from the environment variable.
-
-    Raises:
-        MissingProjectIdError -- When not set.
-
-    Returns:
-        str -- the project name
-    """
-    project_id = os.environ["GOOGLE_CLOUD_PROJECT"]
-
-    if not project_id:
-        raise MissingProjectIdError(
-            "Set the environment variable "
-            + "GCLOUD_PROJECT to your Google Cloud Project Id."
-        )
-    return project_id
-
-
-def project_name():
-    return "projects/" + project_id()
-
-
-if __name__ == "__main__":
-
-    parser = argparse.ArgumentParser(
-        description="Demonstrates Uptime Check API operations."
-    )
-
-    subparsers = parser.add_subparsers(dest="command")
-
-    list_uptime_check_configs_parser = subparsers.add_parser(
-        "list-uptime-check-configs", help=list_uptime_check_configs.__doc__
-    )
-
-    list_uptime_check_ips_parser = subparsers.add_parser(
-        "list-uptime-check-ips", help=list_uptime_check_ips.__doc__
-    )
-
-    create_uptime_check_config_get_parser = subparsers.add_parser(
-        "create-uptime-check-get", help=create_uptime_check_config_get.__doc__
-    )
-    create_uptime_check_config_get_parser.add_argument(
-        "-d", "--display_name", required=False,
-    )
-    create_uptime_check_config_get_parser.add_argument(
-        "-o", "--host_name", required=False,
-    )
-
-    create_uptime_check_config_post_parser = subparsers.add_parser(
-        "create-uptime-check-post", help=create_uptime_check_config_post.__doc__
-    )
-    create_uptime_check_config_post_parser.add_argument(
-        "-d", "--display_name", required=False,
-    )
-    create_uptime_check_config_post_parser.add_argument(
-        "-o", "--host_name", required=False,
-    )
-
-    get_uptime_check_config_parser = subparsers.add_parser(
-        "get-uptime-check-config", help=get_uptime_check_config.__doc__
-    )
-    get_uptime_check_config_parser.add_argument(
-        "-m", "--name", required=True,
-    )
-
-    delete_uptime_check_config_parser = subparsers.add_parser(
-        "delete-uptime-check-config", help=delete_uptime_check_config.__doc__
-    )
-    delete_uptime_check_config_parser.add_argument(
-        "-m", "--name", required=True,
-    )
-
-    update_uptime_check_config_parser = subparsers.add_parser(
-        "update-uptime-check-config", help=update_uptime_check_config.__doc__
-    )
-    update_uptime_check_config_parser.add_argument(
-        "-m", "--name", required=True,
-    )
-    update_uptime_check_config_parser.add_argument(
-        "-d", "--display_name", required=False,
-    )
-    update_uptime_check_config_parser.add_argument(
-        "-p", "--uptime_check_path", required=False,
-    )
-
-    args = parser.parse_args()
-
-    if args.command == "list-uptime-check-configs":
-        list_uptime_check_configs(project_name())
-
-    elif args.command == "list-uptime-check-ips":
-        list_uptime_check_ips()
-
-    elif args.command == "create-uptime-check-get":
-        create_uptime_check_config_get(
-            project_name(), args.host_name, args.display_name
-        )
-    elif args.command == "create-uptime-check-post":
-        create_uptime_check_config_post(
-            project_name(), args.host_name, args.display_name
-        )
-
-    elif args.command == "get-uptime-check-config":
-        get_uptime_check_config(args.name)
-
-    elif args.command == "delete-uptime-check-config":
"delete-uptime-check-config": - delete_uptime_check_config(args.name) - - elif args.command == "update-uptime-check-config": - if not args.display_name and not args.uptime_check_path: - print("Nothing to update. Pass --display_name or " "--uptime_check_path.") - else: - update_uptime_check_config( - args.name, args.display_name, args.uptime_check_path - ) diff --git a/samples/snippets/v3/uptime-check-client/snippets_test.py b/samples/snippets/v3/uptime-check-client/snippets_test.py deleted file mode 100644 index 81d2b247..00000000 --- a/samples/snippets/v3/uptime-check-client/snippets_test.py +++ /dev/null @@ -1,105 +0,0 @@ -# Copyright 2018 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import print_function - -import random -import string - -import backoff -from google.api_core.exceptions import DeadlineExceeded -import pytest - -import snippets - - -def random_name(length): - return "".join([random.choice(string.ascii_lowercase) for i in range(length)]) - - -class UptimeFixture: - """A test fixture that creates uptime check config. - """ - - def __init__(self): - self.project_id = snippets.project_id() - self.project_name = snippets.project_name() - - def __enter__(self): - # Create an uptime check config (GET request). - self.config_get = snippets.create_uptime_check_config_get( - self.project_name, display_name=random_name(10) - ) - # Create an uptime check config (POST request). - self.config_post = snippets.create_uptime_check_config_post( - self.project_name, display_name=random_name(10) - ) - return self - - def __exit__(self, type, value, traceback): - # Delete the config. - snippets.delete_uptime_check_config(self.config_get.name) - snippets.delete_uptime_check_config(self.config_post.name) - - -@pytest.fixture(scope="session") -def uptime(): - with UptimeFixture() as uptime: - yield uptime - - -def test_create_and_delete(capsys): - # create and delete happen in uptime fixture. - with UptimeFixture(): - pass - - -def test_update_uptime_config(capsys): - # create and delete happen in uptime fixture. - new_display_name = random_name(10) - new_uptime_check_path = "/" + random_name(10) - with UptimeFixture() as fixture: - # We sometimes see the permission error saying the resource - # may not exist. Weirdly DeadlineExceeded instance is raised - # in this case. 
-        @backoff.on_exception(backoff.expo, DeadlineExceeded, max_time=120)
-        def call_sample():
-            snippets.update_uptime_check_config(
-                fixture.config_get.name, new_display_name, new_uptime_check_path)
-
-        call_sample()
-
-        out, _ = capsys.readouterr()
-        snippets.get_uptime_check_config(fixture.config_get.name)
-        out, _ = capsys.readouterr()
-        assert new_display_name in out
-        assert new_uptime_check_path in out
-
-
-def test_get_uptime_check_config(capsys, uptime):
-    snippets.get_uptime_check_config(uptime.config_get.name)
-    out, _ = capsys.readouterr()
-    assert uptime.config_get.display_name in out
-
-
-def test_list_uptime_check_configs(capsys, uptime):
-    snippets.list_uptime_check_configs(uptime.project_name)
-    out, _ = capsys.readouterr()
-    assert uptime.config_get.display_name in out
-
-
-def test_list_uptime_check_ips(capsys):
-    snippets.list_uptime_check_ips()
-    out, _ = capsys.readouterr()
-    assert "Singapore" in out
diff --git a/setup.py b/setup.py
index 3b2c26a7..33d7e797 100644
--- a/setup.py
+++ b/setup.py
@@ -1,4 +1,5 @@
-# Copyright 2018 Google LLC
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -11,32 +12,37 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-
+#
 import io
 import os
-import setuptools
+import setuptools  # type: ignore
-
-# Package metadata.
+package_root = os.path.abspath(os.path.dirname(__file__))
 name = "google-cloud-monitoring"
+
+
 description = "Stackdriver Monitoring API client library"
-version = "2.11.3"
-# Should be one of:
-# 'Development Status :: 3 - Alpha'
-# 'Development Status :: 4 - Beta'
-# 'Development Status :: 5 - Production/Stable'
-release_status = "Development Status :: 5 - Production/Stable"
+
+version = {}
+with open(os.path.join(package_root, "google/cloud/monitoring/gapic_version.py")) as fp:
+    exec(fp.read(), version)
+version = version["__version__"]
+
+if version[0] == "0":
+    release_status = "Development Status :: 4 - Beta"
+else:
+    release_status = "Development Status :: 5 - Production/Stable"
+
 dependencies = [
-    "google-api-core[grpc] >= 1.32.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*",
+    "google-api-core[grpc] >= 1.33.2, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*",
     "proto-plus >= 1.22.0, <2.0.0dev",
     "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5",
 ]
 extras = {"pandas": "pandas >= 0.17.1"}
-
-# Setup boilerplate below this line.
+url = "https://github.com/googleapis/python-monitoring"
 
 package_root = os.path.abspath(os.path.dirname(__file__))
 
@@ -44,20 +50,16 @@ with io.open(readme_filename, encoding="utf-8") as readme_file:
     readme = readme_file.read()
 
-# Only include packages under the 'google' namespace. Do not include tests,
-# benchmarks, etc.
 packages = [
     package
     for package in setuptools.PEP420PackageFinder.find()
     if package.startswith("google")
 ]
 
-# Determine which namespaces are needed.
 namespaces = ["google"]
 if "google.cloud" in packages:
     namespaces.append("google.cloud")
-
 setuptools.setup(
     name=name,
     version=version,
@@ -66,7 +68,7 @@
     author="Google LLC",
     author_email="googleapis-packages@google.com",
     license="Apache 2.0",
-    url="https://github.com/googleapis/python-monitoring",
+    url=url,
     classifiers=[
         release_status,
         "Intended Audience :: Developers",
@@ -75,16 +77,17 @@
         "Programming Language :: Python :: 3",
         "Programming Language :: Python :: 3.7",
         "Programming Language :: Python :: 3.8",
+        "Programming Language :: Python :: 3.9",
+        "Programming Language :: Python :: 3.10",
         "Operating System :: OS Independent",
         "Topic :: Internet",
     ],
     platforms="Posix; MacOS X; Windows",
     packages=packages,
+    python_requires=">=3.7",
     namespace_packages=namespaces,
     install_requires=dependencies,
    extras_require=extras,
-    python_requires=">=3.7",
-    scripts=["scripts/fixup_monitoring_v3_keywords.py"],
     include_package_data=True,
     zip_safe=False,
 )
diff --git a/testing/constraints-3.10.txt b/testing/constraints-3.10.txt
index e69de29b..ed7f9aed 100644
--- a/testing/constraints-3.10.txt
+++ b/testing/constraints-3.10.txt
@@ -0,0 +1,6 @@
+# -*- coding: utf-8 -*-
+# This constraints file is required for unit tests.
+# List all library dependencies and extras in this file.
+google-api-core
+proto-plus
+protobuf
diff --git a/testing/constraints-3.11.txt b/testing/constraints-3.11.txt
index e69de29b..ed7f9aed 100644
--- a/testing/constraints-3.11.txt
+++ b/testing/constraints-3.11.txt
@@ -0,0 +1,6 @@
+# -*- coding: utf-8 -*-
+# This constraints file is required for unit tests.
+# List all library dependencies and extras in this file.
+google-api-core
+proto-plus
+protobuf
diff --git a/testing/constraints-3.7.txt b/testing/constraints-3.7.txt
index b69e6917..f84d1b75 100644
--- a/testing/constraints-3.7.txt
+++ b/testing/constraints-3.7.txt
@@ -4,7 +4,7 @@
 # Pin the version to the lower bound.
 # e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev",
 # Then this file should have google-cloud-foo==1.14.0
-google-api-core==1.32.0
+google-api-core==1.33.2
 proto-plus==1.22.0
-pandas==0.23.2
 protobuf==3.19.5
+pandas==0.23.2
diff --git a/testing/constraints-3.8.txt b/testing/constraints-3.8.txt
index da93009b..ed7f9aed 100644
--- a/testing/constraints-3.8.txt
+++ b/testing/constraints-3.8.txt
@@ -1,2 +1,6 @@
-# This constraints file is left inentionally empty
-# so the latest version of dependencies is installed
\ No newline at end of file
+# -*- coding: utf-8 -*-
+# This constraints file is required for unit tests.
+# List all library dependencies and extras in this file.
+google-api-core
+proto-plus
+protobuf
diff --git a/testing/constraints-3.9.txt b/testing/constraints-3.9.txt
index da93009b..ed7f9aed 100644
--- a/testing/constraints-3.9.txt
+++ b/testing/constraints-3.9.txt
@@ -1,2 +1,6 @@
-# This constraints file is left inentionally empty
-# so the latest version of dependencies is installed
\ No newline at end of file
+# -*- coding: utf-8 -*-
+# This constraints file is required for unit tests.
+# List all library dependencies and extras in this file.
+google-api-core
+proto-plus
+protobuf
diff --git a/tests/unit/gapic/monitoring_v3/test_metric_service.py b/tests/unit/gapic/monitoring_v3/test_metric_service.py
index 6263293d..0f5c34b7 100644
--- a/tests/unit/gapic/monitoring_v3/test_metric_service.py
+++ b/tests/unit/gapic/monitoring_v3/test_metric_service.py
@@ -1172,6 +1172,7 @@ def test_get_monitored_resource_descriptor(request_type, transport: str = "grpc"
             type="type_value",
             display_name="display_name_value",
             description="description_value",
+            launch_stage=launch_stage_pb2.LaunchStage.UNIMPLEMENTED,
         )
         response = client.get_monitored_resource_descriptor(request)
 
@@ -1186,6 +1187,7 @@ def test_get_monitored_resource_descriptor(request_type, transport: str = "grpc"
     assert response.type == "type_value"
     assert response.display_name == "display_name_value"
    assert response.description == "description_value"
+    assert response.launch_stage == launch_stage_pb2.LaunchStage.UNIMPLEMENTED
 
 
 def test_get_monitored_resource_descriptor_empty_call():
@@ -1231,6 +1233,7 @@ async def test_get_monitored_resource_descriptor_async(
                 type="type_value",
                 display_name="display_name_value",
                 description="description_value",
+                launch_stage=launch_stage_pb2.LaunchStage.UNIMPLEMENTED,
             )
         )
         response = await client.get_monitored_resource_descriptor(request)
 
@@ -1246,6 +1249,7 @@ async def test_get_monitored_resource_descriptor_async(
     assert response.type == "type_value"
     assert response.display_name == "display_name_value"
     assert response.description == "description_value"
+    assert response.launch_stage == launch_stage_pb2.LaunchStage.UNIMPLEMENTED
 
 
 @pytest.mark.asyncio
@@ -1877,6 +1881,8 @@ def test_get_metric_descriptor(request_type, transport: str = "grpc"):
             unit="unit_value",
             description="description_value",
             display_name="display_name_value",
+            launch_stage=launch_stage_pb2.LaunchStage.UNIMPLEMENTED,
+            monitored_resource_types=["monitored_resource_types_value"],
         )
         response = client.get_metric_descriptor(request)
 
@@ -1894,6 +1900,8 @@ def test_get_metric_descriptor(request_type, transport: str = "grpc"):
     assert response.unit == "unit_value"
     assert response.description == "description_value"
     assert response.display_name == "display_name_value"
+    assert response.launch_stage == launch_stage_pb2.LaunchStage.UNIMPLEMENTED
+    assert response.monitored_resource_types == ["monitored_resource_types_value"]
 
 
 def test_get_metric_descriptor_empty_call():
@@ -1942,6 +1950,8 @@ async def test_get_metric_descriptor_async(
                 unit="unit_value",
                 description="description_value",
                 display_name="display_name_value",
+                launch_stage=launch_stage_pb2.LaunchStage.UNIMPLEMENTED,
+                monitored_resource_types=["monitored_resource_types_value"],
             )
         )
         response = await client.get_metric_descriptor(request)
 
@@ -1960,6 +1970,8 @@ async def test_get_metric_descriptor_async(
     assert response.unit == "unit_value"
     assert response.description == "description_value"
     assert response.display_name == "display_name_value"
+    assert response.launch_stage == launch_stage_pb2.LaunchStage.UNIMPLEMENTED
+    assert response.monitored_resource_types == ["monitored_resource_types_value"]
 
 
 @pytest.mark.asyncio
@@ -2148,6 +2160,8 @@ def test_create_metric_descriptor(request_type, transport: str = "grpc"):
             unit="unit_value",
             description="description_value",
             display_name="display_name_value",
+            launch_stage=launch_stage_pb2.LaunchStage.UNIMPLEMENTED,
+            monitored_resource_types=["monitored_resource_types_value"],
         )
         response = client.create_metric_descriptor(request)
 
@@ -2165,6 +2179,8 @@ def test_create_metric_descriptor(request_type, transport: str = "grpc"):
     assert response.unit == "unit_value"
     assert response.description == "description_value"
     assert response.display_name == "display_name_value"
+    assert response.launch_stage == launch_stage_pb2.LaunchStage.UNIMPLEMENTED
+    assert response.monitored_resource_types == ["monitored_resource_types_value"]
 
 
 def test_create_metric_descriptor_empty_call():
@@ -2213,6 +2229,8 @@ async def test_create_metric_descriptor_async(
                 unit="unit_value",
                 description="description_value",
                 display_name="display_name_value",
+                launch_stage=launch_stage_pb2.LaunchStage.UNIMPLEMENTED,
+                monitored_resource_types=["monitored_resource_types_value"],
             )
         )
         response = await client.create_metric_descriptor(request)
 
@@ -2231,6 +2249,8 @@ async def test_create_metric_descriptor_async(
     assert response.unit == "unit_value"
     assert response.description == "description_value"
     assert response.display_name == "display_name_value"
+    assert response.launch_stage == launch_stage_pb2.LaunchStage.UNIMPLEMENTED
+    assert response.monitored_resource_types == ["monitored_resource_types_value"]
 
 
 @pytest.mark.asyncio
diff --git a/tests/unit/gapic/monitoring_v3/test_notification_channel_service.py b/tests/unit/gapic/monitoring_v3/test_notification_channel_service.py
index bce38cdf..e979d915 100644
--- a/tests/unit/gapic/monitoring_v3/test_notification_channel_service.py
+++ b/tests/unit/gapic/monitoring_v3/test_notification_channel_service.py
@@ -1217,6 +1217,7 @@ def test_get_notification_channel_descriptor(request_type, transport: str = "grp
             display_name="display_name_value",
             description="description_value",
             supported_tiers=[common.ServiceTier.SERVICE_TIER_BASIC],
+            launch_stage=launch_stage_pb2.LaunchStage.UNIMPLEMENTED,
         )
         response = client.get_notification_channel_descriptor(request)
 
@@ -1232,6 +1233,7 @@ def test_get_notification_channel_descriptor(request_type, transport: str = "grp
     assert response.display_name == "display_name_value"
     assert response.description == "description_value"
     assert response.supported_tiers == [common.ServiceTier.SERVICE_TIER_BASIC]
+    assert response.launch_stage == launch_stage_pb2.LaunchStage.UNIMPLEMENTED
 
 
 def test_get_notification_channel_descriptor_empty_call():
@@ -1278,6 +1280,7 @@ async def test_get_notification_channel_descriptor_async(
                 display_name="display_name_value",
                 description="description_value",
                 supported_tiers=[common.ServiceTier.SERVICE_TIER_BASIC],
+                launch_stage=launch_stage_pb2.LaunchStage.UNIMPLEMENTED,
             )
         )
         response = await client.get_notification_channel_descriptor(request)
 
@@ -1294,6 +1297,7 @@ async def test_get_notification_channel_descriptor_async(
     assert response.display_name == "display_name_value"
     assert response.description == "description_value"
     assert response.supported_tiers == [common.ServiceTier.SERVICE_TIER_BASIC]
+    assert response.launch_stage == launch_stage_pb2.LaunchStage.UNIMPLEMENTED
 
 
 @pytest.mark.asyncio