diff --git a/.release-please-manifest.json b/.release-please-manifest.json index 915699a4d462..7431bbdd10c0 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,203 +1,203 @@ { - "packages/google-ads-admanager": "0.2.1", - "packages/google-ads-marketingplatform-admin": "0.1.1", - "packages/google-ai-generativelanguage": "0.6.11", - "packages/google-analytics-admin": "0.23.1", - "packages/google-analytics-data": "0.18.14", + "packages/google-ads-admanager": "0.2.2", + "packages/google-ads-marketingplatform-admin": "0.1.2", + "packages/google-ai-generativelanguage": "0.6.12", + "packages/google-analytics-admin": "0.23.2", + "packages/google-analytics-data": "0.18.15", "packages/google-apps-card": "0.1.5", - "packages/google-apps-chat": "0.1.13", - "packages/google-apps-events-subscriptions": "0.1.3", - "packages/google-apps-meet": "0.1.9", + "packages/google-apps-chat": "0.1.14", + "packages/google-apps-events-subscriptions": "0.1.4", + "packages/google-apps-meet": "0.1.10", "packages/google-apps-script-type": "0.3.11", - "packages/google-area120-tables": "0.11.12", - "packages/google-cloud-access-approval": "1.14.0", - "packages/google-cloud-advisorynotifications": "0.3.11", - "packages/google-cloud-alloydb": "0.3.15", + "packages/google-area120-tables": "0.11.13", + "packages/google-cloud-access-approval": "1.14.1", + "packages/google-cloud-advisorynotifications": "0.3.12", + "packages/google-cloud-alloydb": "0.4.0", "packages/google-cloud-alloydb-connectors": "0.1.7", - "packages/google-cloud-api-gateway": "1.10.0", - "packages/google-cloud-api-keys": "0.5.12", - "packages/google-cloud-apigee-connect": "1.10.0", - "packages/google-cloud-apigee-registry": "0.6.12", - "packages/google-cloud-apihub": "0.2.1", - "packages/google-cloud-appengine-admin": "1.12.0", + "packages/google-cloud-api-gateway": "1.10.1", + "packages/google-cloud-api-keys": "0.5.13", + "packages/google-cloud-apigee-connect": "1.10.1", + 
"packages/google-cloud-apigee-registry": "0.6.13", + "packages/google-cloud-apihub": "0.2.2", + "packages/google-cloud-appengine-admin": "1.12.1", "packages/google-cloud-appengine-logging": "1.5.0", - "packages/google-cloud-apphub": "0.1.3", - "packages/google-cloud-artifact-registry": "1.12.0", - "packages/google-cloud-asset": "3.27.0", - "packages/google-cloud-assured-workloads": "1.13.0", - "packages/google-cloud-automl": "2.14.0", - "packages/google-cloud-backupdr": "0.1.5", - "packages/google-cloud-bare-metal-solution": "1.8.0", - "packages/google-cloud-batch": "0.17.30", - "packages/google-cloud-beyondcorp-appconnections": "0.4.12", - "packages/google-cloud-beyondcorp-appconnectors": "0.4.12", - "packages/google-cloud-beyondcorp-appgateways": "0.4.12", - "packages/google-cloud-beyondcorp-clientconnectorservices": "0.4.12", - "packages/google-cloud-beyondcorp-clientgateways": "0.4.11", - "packages/google-cloud-bigquery-analyticshub": "0.4.12", - "packages/google-cloud-bigquery-biglake": "0.4.10", - "packages/google-cloud-bigquery-connection": "1.16.0", - "packages/google-cloud-bigquery-data-exchange": "0.5.14", - "packages/google-cloud-bigquery-datapolicies": "0.6.9", - "packages/google-cloud-bigquery-datatransfer": "3.17.0", + "packages/google-cloud-apphub": "0.1.4", + "packages/google-cloud-artifact-registry": "1.13.1", + "packages/google-cloud-asset": "3.27.1", + "packages/google-cloud-assured-workloads": "1.13.1", + "packages/google-cloud-automl": "2.14.1", + "packages/google-cloud-backupdr": "0.1.6", + "packages/google-cloud-bare-metal-solution": "1.8.1", + "packages/google-cloud-batch": "0.17.31", + "packages/google-cloud-beyondcorp-appconnections": "0.4.13", + "packages/google-cloud-beyondcorp-appconnectors": "0.4.13", + "packages/google-cloud-beyondcorp-appgateways": "0.4.13", + "packages/google-cloud-beyondcorp-clientconnectorservices": "0.4.13", + "packages/google-cloud-beyondcorp-clientgateways": "0.4.12", + 
"packages/google-cloud-bigquery-analyticshub": "0.4.13", + "packages/google-cloud-bigquery-biglake": "0.4.11", + "packages/google-cloud-bigquery-connection": "1.16.1", + "packages/google-cloud-bigquery-data-exchange": "0.5.15", + "packages/google-cloud-bigquery-datapolicies": "0.6.10", + "packages/google-cloud-bigquery-datatransfer": "3.17.1", "packages/google-cloud-bigquery-logging": "1.5.0", - "packages/google-cloud-bigquery-migration": "0.11.10", - "packages/google-cloud-bigquery-reservation": "1.14.0", - "packages/google-cloud-billing": "1.14.0", - "packages/google-cloud-billing-budgets": "1.15.0", - "packages/google-cloud-binary-authorization": "1.11.0", - "packages/google-cloud-build": "3.26.0", - "packages/google-cloud-certificate-manager": "1.8.0", - "packages/google-cloud-channel": "1.20.0", - "packages/google-cloud-cloudcontrolspartner": "0.2.1", - "packages/google-cloud-commerce-consumer-procurement": "0.1.9", + "packages/google-cloud-bigquery-migration": "0.11.11", + "packages/google-cloud-bigquery-reservation": "1.14.1", + "packages/google-cloud-billing": "1.14.1", + "packages/google-cloud-billing-budgets": "1.15.1", + "packages/google-cloud-binary-authorization": "1.11.1", + "packages/google-cloud-build": "3.27.1", + "packages/google-cloud-certificate-manager": "1.8.1", + "packages/google-cloud-channel": "1.20.1", + "packages/google-cloud-cloudcontrolspartner": "0.2.2", + "packages/google-cloud-commerce-consumer-procurement": "0.1.10", "packages/google-cloud-common": "1.4.0", - "packages/google-cloud-compute": "1.20.0", - "packages/google-cloud-confidentialcomputing": "0.4.12", - "packages/google-cloud-config": "0.1.12", - "packages/google-cloud-contact-center-insights": "1.19.0", - "packages/google-cloud-container": "2.53.0", - "packages/google-cloud-containeranalysis": "2.15.0", - "packages/google-cloud-contentwarehouse": "0.7.10", - "packages/google-cloud-data-fusion": "1.11.0", - "packages/google-cloud-data-qna": "0.10.12", - 
"packages/google-cloud-datacatalog": "3.21.0", - "packages/google-cloud-datacatalog-lineage": "0.3.9", - "packages/google-cloud-dataflow-client": "0.8.13", - "packages/google-cloud-dataform": "0.5.12", - "packages/google-cloud-datalabeling": "1.11.0", - "packages/google-cloud-dataplex": "2.3.0", - "packages/google-cloud-dataproc": "5.15.0", - "packages/google-cloud-dataproc-metastore": "1.16.0", - "packages/google-cloud-datastream": "1.10.0", - "packages/google-cloud-deploy": "2.2.0", - "packages/google-cloud-developerconnect": "0.1.3", - "packages/google-cloud-dialogflow": "2.34.0", - "packages/google-cloud-dialogflow-cx": "1.36.0", - "packages/google-cloud-discoveryengine": "0.13.1", - "packages/google-cloud-dlp": "3.25.0", - "packages/google-cloud-dms": "1.10.0", - "packages/google-cloud-documentai": "2.35.0", - "packages/google-cloud-domains": "1.8.0", - "packages/google-cloud-edgecontainer": "0.5.12", - "packages/google-cloud-edgenetwork": "0.1.12", - "packages/google-cloud-enterpriseknowledgegraph": "0.3.12", - "packages/google-cloud-essential-contacts": "1.8.0", - "packages/google-cloud-eventarc": "1.12.0", - "packages/google-cloud-eventarc-publishing": "0.6.12", - "packages/google-cloud-filestore": "1.10.0", - "packages/google-cloud-functions": "1.18.0", - "packages/google-cloud-gdchardwaremanagement": "0.1.5", - "packages/google-cloud-gke-backup": "0.5.12", - "packages/google-cloud-gke-connect-gateway": "0.9.1", - "packages/google-cloud-gke-hub": "1.15.0", - "packages/google-cloud-gke-multicloud": "0.6.14", - "packages/google-cloud-gsuiteaddons": "0.3.11", - "packages/google-cloud-iam": "2.16.0", + "packages/google-cloud-compute": "1.21.0", + "packages/google-cloud-confidentialcomputing": "0.4.13", + "packages/google-cloud-config": "0.1.13", + "packages/google-cloud-contact-center-insights": "1.19.1", + "packages/google-cloud-container": "2.54.0", + "packages/google-cloud-containeranalysis": "2.15.1", + "packages/google-cloud-contentwarehouse": "0.7.11", + 
"packages/google-cloud-data-fusion": "1.11.1", + "packages/google-cloud-data-qna": "0.10.13", + "packages/google-cloud-datacatalog": "3.21.1", + "packages/google-cloud-datacatalog-lineage": "0.3.10", + "packages/google-cloud-dataflow-client": "0.8.14", + "packages/google-cloud-dataform": "0.5.13", + "packages/google-cloud-datalabeling": "1.11.1", + "packages/google-cloud-dataplex": "2.3.1", + "packages/google-cloud-dataproc": "5.15.1", + "packages/google-cloud-dataproc-metastore": "1.16.1", + "packages/google-cloud-datastream": "1.10.1", + "packages/google-cloud-deploy": "2.3.0", + "packages/google-cloud-developerconnect": "0.1.4", + "packages/google-cloud-dialogflow": "2.35.0", + "packages/google-cloud-dialogflow-cx": "1.37.0", + "packages/google-cloud-discoveryengine": "0.13.4", + "packages/google-cloud-dlp": "3.25.1", + "packages/google-cloud-dms": "1.10.1", + "packages/google-cloud-documentai": "3.0.1", + "packages/google-cloud-domains": "1.8.1", + "packages/google-cloud-edgecontainer": "0.5.14", + "packages/google-cloud-edgenetwork": "0.1.13", + "packages/google-cloud-enterpriseknowledgegraph": "0.3.13", + "packages/google-cloud-essential-contacts": "1.8.1", + "packages/google-cloud-eventarc": "1.13.1", + "packages/google-cloud-eventarc-publishing": "0.6.14", + "packages/google-cloud-filestore": "1.10.1", + "packages/google-cloud-functions": "1.18.1", + "packages/google-cloud-gdchardwaremanagement": "0.1.7", + "packages/google-cloud-gke-backup": "0.5.13", + "packages/google-cloud-gke-connect-gateway": "0.9.2", + "packages/google-cloud-gke-hub": "1.15.1", + "packages/google-cloud-gke-multicloud": "0.6.15", + "packages/google-cloud-gsuiteaddons": "0.3.12", + "packages/google-cloud-iam": "2.16.1", "packages/google-cloud-iam-logging": "1.4.0", - "packages/google-cloud-iap": "1.14.0", - "packages/google-cloud-ids": "1.8.0", - "packages/google-cloud-kms": "3.1.0", - "packages/google-cloud-kms-inventory": "0.2.10", - "packages/google-cloud-language": "2.15.0", - 
"packages/google-cloud-life-sciences": "0.9.13", - "packages/google-cloud-managed-identities": "1.10.0", - "packages/google-cloud-managedkafka": "0.1.4", - "packages/google-cloud-media-translation": "0.11.12", - "packages/google-cloud-memcache": "1.10.0", - "packages/google-cloud-migrationcenter": "0.1.10", - "packages/google-cloud-monitoring": "2.23.0", - "packages/google-cloud-monitoring-dashboards": "2.16.0", - "packages/google-cloud-monitoring-metrics-scopes": "1.7.0", - "packages/google-cloud-netapp": "0.3.15", - "packages/google-cloud-network-connectivity": "2.5.0", - "packages/google-cloud-network-management": "1.20.0", - "packages/google-cloud-network-security": "0.9.12", - "packages/google-cloud-network-services": "0.5.15", - "packages/google-cloud-notebooks": "1.11.0", - "packages/google-cloud-optimization": "1.9.0", - "packages/google-cloud-oracledatabase": "0.1.1", - "packages/google-cloud-orchestration-airflow": "1.15.0", - "packages/google-cloud-os-config": "1.18.0", - "packages/google-cloud-os-login": "2.15.0", - "packages/google-cloud-parallelstore": "0.2.5", - "packages/google-cloud-phishing-protection": "1.12.0", - "packages/google-cloud-policy-troubleshooter": "1.12.0", - "packages/google-cloud-policysimulator": "0.1.9", - "packages/google-cloud-policytroubleshooter-iam": "0.1.8", - "packages/google-cloud-private-ca": "1.13.0", - "packages/google-cloud-private-catalog": "0.9.12", - "packages/google-cloud-privilegedaccessmanager": "0.1.2", - "packages/google-cloud-public-ca": "0.3.13", - "packages/google-cloud-quotas": "0.1.11", - "packages/google-cloud-rapidmigrationassessment": "0.1.10", - "packages/google-cloud-recaptcha-enterprise": "1.23.0", - "packages/google-cloud-recommendations-ai": "0.10.13", - "packages/google-cloud-recommender": "2.16.0", - "packages/google-cloud-redis": "2.16.0", - "packages/google-cloud-redis-cluster": "0.1.9", - "packages/google-cloud-resource-manager": "1.13.0", - "packages/google-cloud-resource-settings": 
"1.10.0", - "packages/google-cloud-retail": "1.23.0", - "packages/google-cloud-run": "0.10.10", - "packages/google-cloud-scheduler": "2.14.0", - "packages/google-cloud-secret-manager": "2.21.0", - "packages/google-cloud-securesourcemanager": "0.1.10", - "packages/google-cloud-securitycenter": "1.35.0", - "packages/google-cloud-securitycentermanagement": "0.1.15", - "packages/google-cloud-service-control": "1.13.0", - "packages/google-cloud-service-directory": "1.12.0", - "packages/google-cloud-service-management": "1.10.0", - "packages/google-cloud-service-usage": "1.11.0", - "packages/google-cloud-servicehealth": "0.1.7", - "packages/google-cloud-shell": "1.10.0", + "packages/google-cloud-iap": "1.14.1", + "packages/google-cloud-ids": "1.8.1", + "packages/google-cloud-kms": "3.1.1", + "packages/google-cloud-kms-inventory": "0.2.11", + "packages/google-cloud-language": "2.15.1", + "packages/google-cloud-life-sciences": "0.9.14", + "packages/google-cloud-managed-identities": "1.10.1", + "packages/google-cloud-managedkafka": "0.1.5", + "packages/google-cloud-media-translation": "0.11.13", + "packages/google-cloud-memcache": "1.10.1", + "packages/google-cloud-migrationcenter": "0.1.11", + "packages/google-cloud-monitoring": "2.23.1", + "packages/google-cloud-monitoring-dashboards": "2.16.1", + "packages/google-cloud-monitoring-metrics-scopes": "1.7.1", + "packages/google-cloud-netapp": "0.3.16", + "packages/google-cloud-network-connectivity": "2.5.1", + "packages/google-cloud-network-management": "1.21.0", + "packages/google-cloud-network-security": "0.9.13", + "packages/google-cloud-network-services": "0.5.16", + "packages/google-cloud-notebooks": "1.11.1", + "packages/google-cloud-optimization": "1.9.1", + "packages/google-cloud-oracledatabase": "0.1.2", + "packages/google-cloud-orchestration-airflow": "1.15.1", + "packages/google-cloud-os-config": "1.18.1", + "packages/google-cloud-os-login": "2.15.1", + "packages/google-cloud-parallelstore": "0.2.6", + 
"packages/google-cloud-phishing-protection": "1.12.1", + "packages/google-cloud-policy-troubleshooter": "1.12.1", + "packages/google-cloud-policysimulator": "0.1.10", + "packages/google-cloud-policytroubleshooter-iam": "0.1.9", + "packages/google-cloud-private-ca": "1.13.1", + "packages/google-cloud-private-catalog": "0.9.13", + "packages/google-cloud-privilegedaccessmanager": "0.1.3", + "packages/google-cloud-public-ca": "0.3.14", + "packages/google-cloud-quotas": "0.1.12", + "packages/google-cloud-rapidmigrationassessment": "0.1.11", + "packages/google-cloud-recaptcha-enterprise": "1.24.1", + "packages/google-cloud-recommendations-ai": "0.10.14", + "packages/google-cloud-recommender": "2.16.1", + "packages/google-cloud-redis": "2.16.1", + "packages/google-cloud-redis-cluster": "0.1.10", + "packages/google-cloud-resource-manager": "1.13.1", + "packages/google-cloud-resource-settings": "1.10.1", + "packages/google-cloud-retail": "1.23.1", + "packages/google-cloud-run": "0.10.11", + "packages/google-cloud-scheduler": "2.14.1", + "packages/google-cloud-secret-manager": "2.21.1", + "packages/google-cloud-securesourcemanager": "0.1.11", + "packages/google-cloud-securitycenter": "1.35.1", + "packages/google-cloud-securitycentermanagement": "0.1.17", + "packages/google-cloud-service-control": "1.13.1", + "packages/google-cloud-service-directory": "1.12.1", + "packages/google-cloud-service-management": "1.11.0", + "packages/google-cloud-service-usage": "1.11.1", + "packages/google-cloud-servicehealth": "0.1.8", + "packages/google-cloud-shell": "1.10.1", "packages/google-cloud-source-context": "1.6.0", - "packages/google-cloud-speech": "2.28.0", - "packages/google-cloud-storage-control": "1.1.0", - "packages/google-cloud-storage-transfer": "1.13.0", - "packages/google-cloud-storageinsights": "0.1.11", - "packages/google-cloud-support": "0.1.10", - "packages/google-cloud-talent": "2.14.0", - "packages/google-cloud-tasks": "2.17.0", - "packages/google-cloud-telcoautomation": 
"0.2.6", - "packages/google-cloud-texttospeech": "2.20.0", - "packages/google-cloud-tpu": "1.19.0", - "packages/google-cloud-trace": "1.14.0", - "packages/google-cloud-translate": "3.17.0", - "packages/google-cloud-video-live-stream": "1.9.0", - "packages/google-cloud-video-stitcher": "0.7.13", - "packages/google-cloud-video-transcoder": "1.13.0", - "packages/google-cloud-videointelligence": "2.14.0", - "packages/google-cloud-vision": "3.8.0", - "packages/google-cloud-visionai": "0.1.4", - "packages/google-cloud-vm-migration": "1.9.0", - "packages/google-cloud-vmwareengine": "1.6.0", - "packages/google-cloud-vpc-access": "1.11.0", - "packages/google-cloud-webrisk": "1.15.0", - "packages/google-cloud-websecurityscanner": "1.15.0", - "packages/google-cloud-workflows": "1.15.0", - "packages/google-cloud-workstations": "0.5.9", + "packages/google-cloud-speech": "2.28.1", + "packages/google-cloud-storage-control": "1.1.1", + "packages/google-cloud-storage-transfer": "1.13.1", + "packages/google-cloud-storageinsights": "0.1.12", + "packages/google-cloud-support": "0.1.11", + "packages/google-cloud-talent": "2.14.1", + "packages/google-cloud-tasks": "2.17.1", + "packages/google-cloud-telcoautomation": "0.2.7", + "packages/google-cloud-texttospeech": "2.21.1", + "packages/google-cloud-tpu": "1.19.1", + "packages/google-cloud-trace": "1.14.1", + "packages/google-cloud-translate": "3.18.0", + "packages/google-cloud-video-live-stream": "1.9.1", + "packages/google-cloud-video-stitcher": "0.7.14", + "packages/google-cloud-video-transcoder": "1.13.1", + "packages/google-cloud-videointelligence": "2.14.1", + "packages/google-cloud-vision": "3.8.1", + "packages/google-cloud-visionai": "0.1.5", + "packages/google-cloud-vm-migration": "1.9.1", + "packages/google-cloud-vmwareengine": "1.6.1", + "packages/google-cloud-vpc-access": "1.11.1", + "packages/google-cloud-webrisk": "1.15.1", + "packages/google-cloud-websecurityscanner": "1.15.1", + "packages/google-cloud-workflows": 
"1.15.1", + "packages/google-cloud-workstations": "0.5.10", "packages/google-geo-type": "0.3.10", - "packages/google-maps-addressvalidation": "0.3.14", - "packages/google-maps-areainsights": "0.1.1", - "packages/google-maps-fleetengine": "0.2.3", - "packages/google-maps-fleetengine-delivery": "0.2.5", - "packages/google-maps-mapsplatformdatasets": "0.4.3", - "packages/google-maps-places": "0.1.19", - "packages/google-maps-routeoptimization": "0.1.5", - "packages/google-maps-routing": "0.6.11", - "packages/google-maps-solar": "0.1.3", - "packages/google-shopping-css": "0.1.9", - "packages/google-shopping-merchant-accounts": "0.2.1", - "packages/google-shopping-merchant-conversions": "0.1.4", - "packages/google-shopping-merchant-datasources": "0.1.4", - "packages/google-shopping-merchant-inventories": "0.1.10", - "packages/google-shopping-merchant-lfp": "0.1.4", - "packages/google-shopping-merchant-notifications": "0.1.3", - "packages/google-shopping-merchant-products": "0.1.3", - "packages/google-shopping-merchant-promotions": "0.1.3", - "packages/google-shopping-merchant-quota": "0.1.3", - "packages/google-shopping-merchant-reports": "0.1.10", + "packages/google-maps-addressvalidation": "0.3.15", + "packages/google-maps-areainsights": "0.1.2", + "packages/google-maps-fleetengine": "0.2.4", + "packages/google-maps-fleetengine-delivery": "0.2.6", + "packages/google-maps-mapsplatformdatasets": "0.4.4", + "packages/google-maps-places": "0.1.20", + "packages/google-maps-routeoptimization": "0.1.6", + "packages/google-maps-routing": "0.6.12", + "packages/google-maps-solar": "0.1.4", + "packages/google-shopping-css": "0.1.10", + "packages/google-shopping-merchant-accounts": "0.2.2", + "packages/google-shopping-merchant-conversions": "0.1.5", + "packages/google-shopping-merchant-datasources": "0.1.5", + "packages/google-shopping-merchant-inventories": "0.1.11", + "packages/google-shopping-merchant-lfp": "0.1.5", + "packages/google-shopping-merchant-notifications": "0.1.4", 
+ "packages/google-shopping-merchant-products": "0.1.4", + "packages/google-shopping-merchant-promotions": "0.1.4", + "packages/google-shopping-merchant-quota": "0.1.4", + "packages/google-shopping-merchant-reports": "0.1.11", "packages/google-shopping-type": "0.1.9", - "packages/grafeas": "1.12.0" + "packages/grafeas": "1.12.1" } diff --git a/CHANGELOG.md b/CHANGELOG.md index 329502ab6ea4..a6191fa4d541 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,205 +2,205 @@ Please refer to each API's `CHANGELOG.md` file under the `packages/` directory Changelogs ----- -- [google-ads-admanager==0.2.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-ads-admanager/CHANGELOG.md) -- [google-ads-marketingplatform-admin==0.1.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-ads-marketingplatform-admin/CHANGELOG.md) -- [google-ai-generativelanguage==0.6.10](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-ai-generativelanguage/CHANGELOG.md) -- [google-analytics-admin==0.23.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-analytics-admin/CHANGELOG.md) -- [google-analytics-data==0.18.12](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-analytics-data/CHANGELOG.md) -- [google-apps-card==0.1.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-apps-card/CHANGELOG.md) -- [google-apps-chat==0.1.12](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-apps-chat/CHANGELOG.md) -- [google-apps-events-subscriptions==0.1.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-apps-events-subscriptions/CHANGELOG.md) -- 
[google-apps-meet==0.1.8](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-apps-meet/CHANGELOG.md) -- [google-apps-script-type==0.3.10](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-apps-script-type/CHANGELOG.md) -- [google-area120-tables==0.11.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-area120-tables/CHANGELOG.md) -- [google-cloud-access-approval==1.13.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-access-approval/CHANGELOG.md) -- [google-cloud-advisorynotifications==0.3.10](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-advisorynotifications/CHANGELOG.md) -- [google-cloud-alloydb-connectors==0.1.6](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-alloydb-connectors/CHANGELOG.md) -- [google-cloud-alloydb==0.3.13](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-alloydb/CHANGELOG.md) -- [google-cloud-api-gateway==1.9.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-api-gateway/CHANGELOG.md) -- [google-cloud-api-keys==0.5.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-api-keys/CHANGELOG.md) -- [google-cloud-apigee-connect==1.9.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-apigee-connect/CHANGELOG.md) -- [google-cloud-apigee-registry==0.6.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-apigee-registry/CHANGELOG.md) -- 
[google-cloud-apihub==0.2.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-apihub/CHANGELOG.md) -- [google-cloud-appengine-admin==1.11.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-appengine-admin/CHANGELOG.md) -- [google-cloud-appengine-logging==1.4.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-appengine-logging/CHANGELOG.md) -- [google-cloud-apphub==0.1.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-apphub/CHANGELOG.md) -- [google-cloud-artifact-registry==1.11.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-artifact-registry/CHANGELOG.md) -- [google-cloud-asset==3.26.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-asset/CHANGELOG.md) -- [google-cloud-assured-workloads==1.12.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-assured-workloads/CHANGELOG.md) -- [google-cloud-automl==2.13.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-automl/CHANGELOG.md) -- [google-cloud-backupdr==0.1.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-backupdr/CHANGELOG.md) -- [google-cloud-bare-metal-solution==1.7.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bare-metal-solution/CHANGELOG.md) -- [google-cloud-batch==0.17.29](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-batch/CHANGELOG.md) -- 
[google-cloud-beyondcorp-appconnections==0.4.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-beyondcorp-appconnections/CHANGELOG.md) -- [google-cloud-beyondcorp-appconnectors==0.4.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-beyondcorp-appconnectors/CHANGELOG.md) -- [google-cloud-beyondcorp-appgateways==0.4.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-beyondcorp-appgateways/CHANGELOG.md) -- [google-cloud-beyondcorp-clientconnectorservices==0.4.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-beyondcorp-clientconnectorservices/CHANGELOG.md) -- [google-cloud-beyondcorp-clientgateways==0.4.10](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-beyondcorp-clientgateways/CHANGELOG.md) -- [google-cloud-bigquery-analyticshub==0.4.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-analyticshub/CHANGELOG.md) -- [google-cloud-bigquery-biglake==0.4.9](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-biglake/CHANGELOG.md) -- [google-cloud-bigquery-connection==1.15.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-connection/CHANGELOG.md) -- [google-cloud-bigquery-data-exchange==0.5.13](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-data-exchange/CHANGELOG.md) -- [google-cloud-bigquery-datapolicies==0.6.8](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-datapolicies/CHANGELOG.md) -- 
[google-cloud-bigquery-datatransfer==3.16.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-datatransfer/CHANGELOG.md) -- [google-cloud-bigquery-logging==1.4.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-logging/CHANGELOG.md) -- [google-cloud-bigquery-migration==0.11.9](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-migration/CHANGELOG.md) -- [google-cloud-bigquery-reservation==1.13.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-reservation/CHANGELOG.md) -- [google-cloud-billing-budgets==1.14.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-billing-budgets/CHANGELOG.md) -- [google-cloud-billing==1.13.6](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-billing/CHANGELOG.md) -- [google-cloud-binary-authorization==1.10.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-binary-authorization/CHANGELOG.md) -- [google-cloud-build==3.25.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-build/CHANGELOG.md) -- [google-cloud-certificate-manager==1.7.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-certificate-manager/CHANGELOG.md) -- [google-cloud-channel==1.19.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-channel/CHANGELOG.md) -- [google-cloud-cloudcontrolspartner==0.2.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-cloudcontrolspartner/CHANGELOG.md) -- 
[google-cloud-commerce-consumer-procurement==0.1.8](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-commerce-consumer-procurement/CHANGELOG.md) -- [google-cloud-common==1.3.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-common/CHANGELOG.md) -- [google-cloud-compute==1.19.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-compute/CHANGELOG.md) -- [google-cloud-confidentialcomputing==0.4.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-confidentialcomputing/CHANGELOG.md) -- [google-cloud-config==0.1.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-config/CHANGELOG.md) -- [google-cloud-contact-center-insights==1.18.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-contact-center-insights/CHANGELOG.md) -- [google-cloud-container==2.52.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-container/CHANGELOG.md) -- [google-cloud-containeranalysis==2.14.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-containeranalysis/CHANGELOG.md) -- [google-cloud-contentwarehouse==0.7.9](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-contentwarehouse/CHANGELOG.md) -- [google-cloud-data-fusion==1.10.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-data-fusion/CHANGELOG.md) -- [google-cloud-data-qna==0.10.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-data-qna/CHANGELOG.md) -- 
[google-cloud-datacatalog-lineage==0.3.8](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-datacatalog-lineage/CHANGELOG.md) -- [google-cloud-datacatalog==3.20.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-datacatalog/CHANGELOG.md) -- [google-cloud-dataflow-client==0.8.12](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dataflow-client/CHANGELOG.md) -- [google-cloud-dataform==0.5.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dataform/CHANGELOG.md) -- [google-cloud-datalabeling==1.10.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-datalabeling/CHANGELOG.md) -- [google-cloud-dataplex==2.2.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dataplex/CHANGELOG.md) -- [google-cloud-dataproc-metastore==1.15.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dataproc-metastore/CHANGELOG.md) -- [google-cloud-dataproc==5.13.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dataproc/CHANGELOG.md) -- [google-cloud-datastream==1.9.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-datastream/CHANGELOG.md) -- [google-cloud-deploy==2.1.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-deploy/CHANGELOG.md) -- [google-cloud-developerconnect==0.1.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-developerconnect/CHANGELOG.md) -- 
[google-cloud-dialogflow-cx==1.35.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dialogflow-cx/CHANGELOG.md) -- [google-cloud-dialogflow==2.33.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dialogflow/CHANGELOG.md) -- [google-cloud-discoveryengine==0.13.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-discoveryengine/CHANGELOG.md) -- [google-cloud-dlp==3.23.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dlp/CHANGELOG.md) -- [google-cloud-dms==1.9.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dms/CHANGELOG.md) -- [google-cloud-documentai==2.33.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-documentai/CHANGELOG.md) -- [google-cloud-domains==1.7.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-domains/CHANGELOG.md) -- [google-cloud-edgecontainer==0.5.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-edgecontainer/CHANGELOG.md) -- [google-cloud-edgenetwork==0.1.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-edgenetwork/CHANGELOG.md) -- [google-cloud-enterpriseknowledgegraph==0.3.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-enterpriseknowledgegraph/CHANGELOG.md) -- [google-cloud-essential-contacts==1.7.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-essential-contacts/CHANGELOG.md) -- 
[google-cloud-eventarc-publishing==0.6.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-eventarc-publishing/CHANGELOG.md) -- [google-cloud-eventarc==1.11.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-eventarc/CHANGELOG.md) -- [google-cloud-filestore==1.9.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-filestore/CHANGELOG.md) -- [google-cloud-functions==1.17.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-functions/CHANGELOG.md) -- [google-cloud-gdchardwaremanagement==0.1.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-gdchardwaremanagement/CHANGELOG.md) -- [google-cloud-gke-backup==0.5.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-gke-backup/CHANGELOG.md) -- [google-cloud-gke-connect-gateway==0.9.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-gke-connect-gateway/CHANGELOG.md) -- [google-cloud-gke-hub==1.14.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-gke-hub/CHANGELOG.md) -- [google-cloud-gke-multicloud==0.6.13](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-gke-multicloud/CHANGELOG.md) -- [google-cloud-gsuiteaddons==0.3.10](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-gsuiteaddons/CHANGELOG.md) -- [google-cloud-iam-logging==1.3.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-iam-logging/CHANGELOG.md) -- 
[google-cloud-iam==2.15.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-iam/CHANGELOG.md) -- [google-cloud-iap==1.13.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-iap/CHANGELOG.md) -- [google-cloud-ids==1.7.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-ids/CHANGELOG.md) -- [google-cloud-kms-inventory==0.2.9](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-kms-inventory/CHANGELOG.md) -- [google-cloud-kms==3.0.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-kms/CHANGELOG.md) -- [google-cloud-language==2.14.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-language/CHANGELOG.md) -- [google-cloud-life-sciences==0.9.12](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-life-sciences/CHANGELOG.md) -- [google-cloud-managed-identities==1.9.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-managed-identities/CHANGELOG.md) -- [google-cloud-managedkafka==0.1.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-managedkafka/CHANGELOG.md) -- [google-cloud-media-translation==0.11.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-media-translation/CHANGELOG.md) -- [google-cloud-memcache==1.9.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-memcache/CHANGELOG.md) -- [google-cloud-migrationcenter==0.1.9](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-migrationcenter/CHANGELOG.md) -- 
[google-cloud-monitoring-dashboards==2.15.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-monitoring-dashboards/CHANGELOG.md) -- [google-cloud-monitoring-metrics-scopes==1.6.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-monitoring-metrics-scopes/CHANGELOG.md) -- [google-cloud-monitoring==2.22.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-monitoring/CHANGELOG.md) -- [google-cloud-netapp==0.3.14](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-netapp/CHANGELOG.md) -- [google-cloud-network-connectivity==2.4.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-network-connectivity/CHANGELOG.md) -- [google-cloud-network-management==1.18.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-network-management/CHANGELOG.md) -- [google-cloud-network-security==0.9.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-network-security/CHANGELOG.md) -- [google-cloud-network-services==0.5.14](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-network-services/CHANGELOG.md) -- [google-cloud-notebooks==1.10.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-notebooks/CHANGELOG.md) -- [google-cloud-optimization==1.8.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-optimization/CHANGELOG.md) -- [google-cloud-oracledatabase==0.1.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-oracledatabase/CHANGELOG.md) -- 
[google-cloud-orchestration-airflow==1.14.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-orchestration-airflow/CHANGELOG.md) -- [google-cloud-os-config==1.17.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-os-config/CHANGELOG.md) -- [google-cloud-os-login==2.14.6](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-os-login/CHANGELOG.md) -- [google-cloud-parallelstore==0.2.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-parallelstore/CHANGELOG.md) -- [google-cloud-phishing-protection==1.11.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-phishing-protection/CHANGELOG.md) -- [google-cloud-policy-troubleshooter==1.11.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-policy-troubleshooter/CHANGELOG.md) -- [google-cloud-policysimulator==0.1.8](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-policysimulator/CHANGELOG.md) -- [google-cloud-policytroubleshooter-iam==0.1.7](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-policytroubleshooter-iam/CHANGELOG.md) -- [google-cloud-private-ca==1.12.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-private-ca/CHANGELOG.md) -- [google-cloud-private-catalog==0.9.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-private-catalog/CHANGELOG.md) -- [google-cloud-privilegedaccessmanager==0.1.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-privilegedaccessmanager/CHANGELOG.md) -- 
[google-cloud-public-ca==0.3.12](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-public-ca/CHANGELOG.md) -- [google-cloud-quotas==0.1.10](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-quotas/CHANGELOG.md) -- [google-cloud-rapidmigrationassessment==0.1.9](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-rapidmigrationassessment/CHANGELOG.md) -- [google-cloud-recaptcha-enterprise==1.22.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-recaptcha-enterprise/CHANGELOG.md) -- [google-cloud-recommendations-ai==0.10.12](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-recommendations-ai/CHANGELOG.md) -- [google-cloud-recommender==2.15.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-recommender/CHANGELOG.md) -- [google-cloud-redis-cluster==0.1.8](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-redis-cluster/CHANGELOG.md) -- [google-cloud-redis==2.15.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-redis/CHANGELOG.md) -- [google-cloud-resource-manager==1.12.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-resource-manager/CHANGELOG.md) -- [google-cloud-resource-settings==1.9.6](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-resource-settings/CHANGELOG.md) -- [google-cloud-retail==1.22.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-retail/CHANGELOG.md) -- 
[google-cloud-run==0.10.9](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-run/CHANGELOG.md) -- [google-cloud-scheduler==2.13.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-scheduler/CHANGELOG.md) -- [google-cloud-secret-manager==2.20.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-secret-manager/CHANGELOG.md) -- [google-cloud-securesourcemanager==0.1.8](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-securesourcemanager/CHANGELOG.md) -- [google-cloud-securitycenter==1.34.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-securitycenter/CHANGELOG.md) -- [google-cloud-securitycentermanagement==0.1.14](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-securitycentermanagement/CHANGELOG.md) -- [google-cloud-service-control==1.12.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-service-control/CHANGELOG.md) -- [google-cloud-service-directory==1.11.6](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-service-directory/CHANGELOG.md) -- [google-cloud-service-management==1.9.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-service-management/CHANGELOG.md) -- [google-cloud-service-usage==1.10.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-service-usage/CHANGELOG.md) -- [google-cloud-servicehealth==0.1.6](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-servicehealth/CHANGELOG.md) -- 
[google-cloud-shell==1.9.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-shell/CHANGELOG.md) -- [google-cloud-source-context==1.5.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-source-context/CHANGELOG.md) -- [google-cloud-speech==2.27.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-speech/CHANGELOG.md) -- [google-cloud-storage-control==1.0.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-storage-control/CHANGELOG.md) -- [google-cloud-storage-transfer==1.12.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-storage-transfer/CHANGELOG.md) -- [google-cloud-storageinsights==0.1.10](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-storageinsights/CHANGELOG.md) -- [google-cloud-support==0.1.9](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-support/CHANGELOG.md) -- [google-cloud-talent==2.13.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-talent/CHANGELOG.md) -- [google-cloud-tasks==2.16.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-tasks/CHANGELOG.md) -- [google-cloud-telcoautomation==0.2.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-telcoautomation/CHANGELOG.md) -- [google-cloud-texttospeech==2.18.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-texttospeech/CHANGELOG.md) -- [google-cloud-tpu==1.18.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-tpu/CHANGELOG.md) -- 
[google-cloud-trace==1.13.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-trace/CHANGELOG.md) -- [google-cloud-translate==3.16.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-translate/CHANGELOG.md) -- [google-cloud-video-live-stream==1.8.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-video-live-stream/CHANGELOG.md) -- [google-cloud-video-stitcher==0.7.12](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-video-stitcher/CHANGELOG.md) -- [google-cloud-video-transcoder==1.12.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-video-transcoder/CHANGELOG.md) -- [google-cloud-videointelligence==2.13.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-videointelligence/CHANGELOG.md) -- [google-cloud-vision==3.7.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-vision/CHANGELOG.md) -- [google-cloud-visionai==0.1.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-visionai/CHANGELOG.md) -- [google-cloud-vm-migration==1.8.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-vm-migration/CHANGELOG.md) -- [google-cloud-vmwareengine==1.5.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-vmwareengine/CHANGELOG.md) -- [google-cloud-vpc-access==1.10.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-vpc-access/CHANGELOG.md) -- 
[google-cloud-webrisk==1.14.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-webrisk/CHANGELOG.md) -- [google-cloud-websecurityscanner==1.14.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-websecurityscanner/CHANGELOG.md) -- [google-cloud-workflows==1.14.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-workflows/CHANGELOG.md) -- [google-cloud-workstations==0.5.8](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-workstations/CHANGELOG.md) -- [google-geo-type==0.3.9](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-geo-type/CHANGELOG.md) -- [google-maps-addressvalidation==0.3.13](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-addressvalidation/CHANGELOG.md) -- [google-maps-areainsights==0.1.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-areainsights/CHANGELOG.md) -- [google-maps-fleetengine-delivery==0.2.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-fleetengine-delivery/CHANGELOG.md) -- [google-maps-fleetengine==0.2.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-fleetengine/CHANGELOG.md) -- [google-maps-mapsplatformdatasets==0.4.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-mapsplatformdatasets/CHANGELOG.md) -- [google-maps-places==0.1.18](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-places/CHANGELOG.md) -- 
[google-maps-routeoptimization==0.1.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-routeoptimization/CHANGELOG.md) -- [google-maps-routing==0.6.10](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-routing/CHANGELOG.md) -- [google-maps-solar==0.1.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-solar/CHANGELOG.md) -- [google-shopping-css==0.1.8](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-css/CHANGELOG.md) -- [google-shopping-merchant-accounts==0.2.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-merchant-accounts/CHANGELOG.md) -- [google-shopping-merchant-conversions==0.1.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-merchant-conversions/CHANGELOG.md) -- [google-shopping-merchant-datasources==0.1.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-merchant-datasources/CHANGELOG.md) -- [google-shopping-merchant-inventories==0.1.9](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-merchant-inventories/CHANGELOG.md) -- [google-shopping-merchant-lfp==0.1.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-merchant-lfp/CHANGELOG.md) -- [google-shopping-merchant-notifications==0.1.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-merchant-notifications/CHANGELOG.md) -- [google-shopping-merchant-products==0.1.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-merchant-products/CHANGELOG.md) -- 
[google-shopping-merchant-promotions==0.1.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-merchant-promotions/CHANGELOG.md) -- [google-shopping-merchant-quota==0.1.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-merchant-quota/CHANGELOG.md) -- [google-shopping-merchant-reports==0.1.9](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-merchant-reports/CHANGELOG.md) -- [google-shopping-type==0.1.8](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-type/CHANGELOG.md) -- [grafeas==1.11.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/grafeas/CHANGELOG.md) +- [google-ads-admanager==0.2.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-ads-admanager/CHANGELOG.md) +- [google-ads-marketingplatform-admin==0.1.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-ads-marketingplatform-admin/CHANGELOG.md) +- [google-ai-generativelanguage==0.6.12](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-ai-generativelanguage/CHANGELOG.md) +- [google-analytics-admin==0.23.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-analytics-admin/CHANGELOG.md) +- [google-analytics-data==0.18.15](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-analytics-data/CHANGELOG.md) +- [google-apps-card==0.1.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-apps-card/CHANGELOG.md) +- [google-apps-chat==0.1.14](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-apps-chat/CHANGELOG.md) +- 
[google-apps-events-subscriptions==0.1.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-apps-events-subscriptions/CHANGELOG.md) +- [google-apps-meet==0.1.10](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-apps-meet/CHANGELOG.md) +- [google-apps-script-type==0.3.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-apps-script-type/CHANGELOG.md) +- [google-area120-tables==0.11.13](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-area120-tables/CHANGELOG.md) +- [google-cloud-access-approval==1.14.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-access-approval/CHANGELOG.md) +- [google-cloud-advisorynotifications==0.3.12](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-advisorynotifications/CHANGELOG.md) +- [google-cloud-alloydb-connectors==0.1.7](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-alloydb-connectors/CHANGELOG.md) +- [google-cloud-alloydb==0.4.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-alloydb/CHANGELOG.md) +- [google-cloud-api-gateway==1.10.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-api-gateway/CHANGELOG.md) +- [google-cloud-api-keys==0.5.13](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-api-keys/CHANGELOG.md) +- [google-cloud-apigee-connect==1.10.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-apigee-connect/CHANGELOG.md) +- 
[google-cloud-apigee-registry==0.6.13](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-apigee-registry/CHANGELOG.md) +- [google-cloud-apihub==0.2.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-apihub/CHANGELOG.md) +- [google-cloud-appengine-admin==1.12.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-appengine-admin/CHANGELOG.md) +- [google-cloud-appengine-logging==1.5.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-appengine-logging/CHANGELOG.md) +- [google-cloud-apphub==0.1.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-apphub/CHANGELOG.md) +- [google-cloud-artifact-registry==1.13.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-artifact-registry/CHANGELOG.md) +- [google-cloud-asset==3.27.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-asset/CHANGELOG.md) +- [google-cloud-assured-workloads==1.13.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-assured-workloads/CHANGELOG.md) +- [google-cloud-automl==2.14.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-automl/CHANGELOG.md) +- [google-cloud-backupdr==0.1.6](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-backupdr/CHANGELOG.md) +- [google-cloud-bare-metal-solution==1.8.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bare-metal-solution/CHANGELOG.md) +- 
[google-cloud-batch==0.17.31](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-batch/CHANGELOG.md) +- [google-cloud-beyondcorp-appconnections==0.4.13](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-beyondcorp-appconnections/CHANGELOG.md) +- [google-cloud-beyondcorp-appconnectors==0.4.13](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-beyondcorp-appconnectors/CHANGELOG.md) +- [google-cloud-beyondcorp-appgateways==0.4.13](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-beyondcorp-appgateways/CHANGELOG.md) +- [google-cloud-beyondcorp-clientconnectorservices==0.4.13](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-beyondcorp-clientconnectorservices/CHANGELOG.md) +- [google-cloud-beyondcorp-clientgateways==0.4.12](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-beyondcorp-clientgateways/CHANGELOG.md) +- [google-cloud-bigquery-analyticshub==0.4.13](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-analyticshub/CHANGELOG.md) +- [google-cloud-bigquery-biglake==0.4.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-biglake/CHANGELOG.md) +- [google-cloud-bigquery-connection==1.16.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-connection/CHANGELOG.md) +- [google-cloud-bigquery-data-exchange==0.5.15](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-data-exchange/CHANGELOG.md) +- 
[google-cloud-bigquery-datapolicies==0.6.10](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-datapolicies/CHANGELOG.md) +- [google-cloud-bigquery-datatransfer==3.17.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-datatransfer/CHANGELOG.md) +- [google-cloud-bigquery-logging==1.5.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-logging/CHANGELOG.md) +- [google-cloud-bigquery-migration==0.11.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-migration/CHANGELOG.md) +- [google-cloud-bigquery-reservation==1.14.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-reservation/CHANGELOG.md) +- [google-cloud-billing-budgets==1.15.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-billing-budgets/CHANGELOG.md) +- [google-cloud-billing==1.14.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-billing/CHANGELOG.md) +- [google-cloud-binary-authorization==1.11.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-binary-authorization/CHANGELOG.md) +- [google-cloud-build==3.27.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-build/CHANGELOG.md) +- [google-cloud-certificate-manager==1.8.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-certificate-manager/CHANGELOG.md) +- [google-cloud-channel==1.20.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-channel/CHANGELOG.md) +- 
[google-cloud-cloudcontrolspartner==0.2.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-cloudcontrolspartner/CHANGELOG.md) +- [google-cloud-commerce-consumer-procurement==0.1.10](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-commerce-consumer-procurement/CHANGELOG.md) +- [google-cloud-common==1.4.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-common/CHANGELOG.md) +- [google-cloud-compute==1.20.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-compute/CHANGELOG.md) +- [google-cloud-confidentialcomputing==0.4.13](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-confidentialcomputing/CHANGELOG.md) +- [google-cloud-config==0.1.13](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-config/CHANGELOG.md) +- [google-cloud-contact-center-insights==1.19.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-contact-center-insights/CHANGELOG.md) +- [google-cloud-container==2.53.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-container/CHANGELOG.md) +- [google-cloud-containeranalysis==2.15.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-containeranalysis/CHANGELOG.md) +- [google-cloud-contentwarehouse==0.7.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-contentwarehouse/CHANGELOG.md) +- [google-cloud-data-fusion==1.11.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-data-fusion/CHANGELOG.md) +- 
[google-cloud-data-qna==0.10.13](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-data-qna/CHANGELOG.md) +- [google-cloud-datacatalog-lineage==0.3.10](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-datacatalog-lineage/CHANGELOG.md) +- [google-cloud-datacatalog==3.21.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-datacatalog/CHANGELOG.md) +- [google-cloud-dataflow-client==0.8.13](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dataflow-client/CHANGELOG.md) +- [google-cloud-dataform==0.5.13](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dataform/CHANGELOG.md) +- [google-cloud-datalabeling==1.11.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-datalabeling/CHANGELOG.md) +- [google-cloud-dataplex==2.3.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dataplex/CHANGELOG.md) +- [google-cloud-dataproc-metastore==1.16.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dataproc-metastore/CHANGELOG.md) +- [google-cloud-dataproc==5.15.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dataproc/CHANGELOG.md) +- [google-cloud-datastream==1.10.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-datastream/CHANGELOG.md) +- [google-cloud-deploy==2.3.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-deploy/CHANGELOG.md) +- 
[google-cloud-developerconnect==0.1.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-developerconnect/CHANGELOG.md) +- [google-cloud-dialogflow-cx==1.36.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dialogflow-cx/CHANGELOG.md) +- [google-cloud-dialogflow==2.34.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dialogflow/CHANGELOG.md) +- [google-cloud-discoveryengine==0.13.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-discoveryengine/CHANGELOG.md) +- [google-cloud-dlp==3.25.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dlp/CHANGELOG.md) +- [google-cloud-dms==1.10.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dms/CHANGELOG.md) +- [google-cloud-documentai==3.0.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-documentai/CHANGELOG.md) +- [google-cloud-domains==1.8.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-domains/CHANGELOG.md) +- [google-cloud-edgecontainer==0.5.13](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-edgecontainer/CHANGELOG.md) +- [google-cloud-edgenetwork==0.1.12](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-edgenetwork/CHANGELOG.md) +- [google-cloud-enterpriseknowledgegraph==0.3.12](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-enterpriseknowledgegraph/CHANGELOG.md) +- 
[google-cloud-essential-contacts==1.8.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-essential-contacts/CHANGELOG.md) +- [google-cloud-eventarc-publishing==0.6.13](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-eventarc-publishing/CHANGELOG.md) +- [google-cloud-eventarc==1.13.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-eventarc/CHANGELOG.md) +- [google-cloud-filestore==1.10.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-filestore/CHANGELOG.md) +- [google-cloud-functions==1.18.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-functions/CHANGELOG.md) +- [google-cloud-gdchardwaremanagement==0.1.6](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-gdchardwaremanagement/CHANGELOG.md) +- [google-cloud-gke-backup==0.5.12](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-gke-backup/CHANGELOG.md) +- [google-cloud-gke-connect-gateway==0.9.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-gke-connect-gateway/CHANGELOG.md) +- [google-cloud-gke-hub==1.15.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-gke-hub/CHANGELOG.md) +- [google-cloud-gke-multicloud==0.6.14](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-gke-multicloud/CHANGELOG.md) +- [google-cloud-gsuiteaddons==0.3.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-gsuiteaddons/CHANGELOG.md) +- 
[google-cloud-iam-logging==1.4.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-iam-logging/CHANGELOG.md) +- [google-cloud-iam==2.16.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-iam/CHANGELOG.md) +- [google-cloud-iap==1.14.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-iap/CHANGELOG.md) +- [google-cloud-ids==1.8.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-ids/CHANGELOG.md) +- [google-cloud-kms-inventory==0.2.10](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-kms-inventory/CHANGELOG.md) +- [google-cloud-kms==3.1.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-kms/CHANGELOG.md) +- [google-cloud-language==2.15.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-language/CHANGELOG.md) +- [google-cloud-life-sciences==0.9.13](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-life-sciences/CHANGELOG.md) +- [google-cloud-managed-identities==1.10.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-managed-identities/CHANGELOG.md) +- [google-cloud-managedkafka==0.1.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-managedkafka/CHANGELOG.md) +- [google-cloud-media-translation==0.11.12](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-media-translation/CHANGELOG.md) +- [google-cloud-memcache==1.10.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-memcache/CHANGELOG.md) +- 
[google-cloud-migrationcenter==0.1.10](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-migrationcenter/CHANGELOG.md) +- [google-cloud-monitoring-dashboards==2.16.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-monitoring-dashboards/CHANGELOG.md) +- [google-cloud-monitoring-metrics-scopes==1.7.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-monitoring-metrics-scopes/CHANGELOG.md) +- [google-cloud-monitoring==2.23.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-monitoring/CHANGELOG.md) +- [google-cloud-netapp==0.3.15](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-netapp/CHANGELOG.md) +- [google-cloud-network-connectivity==2.5.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-network-connectivity/CHANGELOG.md) +- [google-cloud-network-management==1.20.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-network-management/CHANGELOG.md) +- [google-cloud-network-security==0.9.12](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-network-security/CHANGELOG.md) +- [google-cloud-network-services==0.5.15](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-network-services/CHANGELOG.md) +- [google-cloud-notebooks==1.11.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-notebooks/CHANGELOG.md) +- [google-cloud-optimization==1.9.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-optimization/CHANGELOG.md) +- 
[google-cloud-oracledatabase==0.1.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-oracledatabase/CHANGELOG.md) +- [google-cloud-orchestration-airflow==1.15.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-orchestration-airflow/CHANGELOG.md) +- [google-cloud-os-config==1.18.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-os-config/CHANGELOG.md) +- [google-cloud-os-login==2.15.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-os-login/CHANGELOG.md) +- [google-cloud-parallelstore==0.2.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-parallelstore/CHANGELOG.md) +- [google-cloud-phishing-protection==1.12.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-phishing-protection/CHANGELOG.md) +- [google-cloud-policy-troubleshooter==1.12.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-policy-troubleshooter/CHANGELOG.md) +- [google-cloud-policysimulator==0.1.9](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-policysimulator/CHANGELOG.md) +- [google-cloud-policytroubleshooter-iam==0.1.8](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-policytroubleshooter-iam/CHANGELOG.md) +- [google-cloud-private-ca==1.13.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-private-ca/CHANGELOG.md) +- [google-cloud-private-catalog==0.9.12](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-private-catalog/CHANGELOG.md) +- 
[google-cloud-privilegedaccessmanager==0.1.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-privilegedaccessmanager/CHANGELOG.md) +- [google-cloud-public-ca==0.3.13](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-public-ca/CHANGELOG.md) +- [google-cloud-quotas==0.1.12](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-quotas/CHANGELOG.md) +- [google-cloud-rapidmigrationassessment==0.1.10](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-rapidmigrationassessment/CHANGELOG.md) +- [google-cloud-recaptcha-enterprise==1.24.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-recaptcha-enterprise/CHANGELOG.md) +- [google-cloud-recommendations-ai==0.10.13](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-recommendations-ai/CHANGELOG.md) +- [google-cloud-recommender==2.16.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-recommender/CHANGELOG.md) +- [google-cloud-redis-cluster==0.1.9](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-redis-cluster/CHANGELOG.md) +- [google-cloud-redis==2.16.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-redis/CHANGELOG.md) +- [google-cloud-resource-manager==1.13.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-resource-manager/CHANGELOG.md) +- [google-cloud-resource-settings==1.10.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-resource-settings/CHANGELOG.md) +- 
[google-cloud-retail==1.23.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-retail/CHANGELOG.md) +- [google-cloud-run==0.10.10](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-run/CHANGELOG.md) +- [google-cloud-scheduler==2.14.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-scheduler/CHANGELOG.md) +- [google-cloud-secret-manager==2.21.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-secret-manager/CHANGELOG.md) +- [google-cloud-securesourcemanager==0.1.10](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-securesourcemanager/CHANGELOG.md) +- [google-cloud-securitycenter==1.35.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-securitycenter/CHANGELOG.md) +- [google-cloud-securitycentermanagement==0.1.16](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-securitycentermanagement/CHANGELOG.md) +- [google-cloud-service-control==1.13.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-service-control/CHANGELOG.md) +- [google-cloud-service-directory==1.12.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-service-directory/CHANGELOG.md) +- [google-cloud-service-management==1.10.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-service-management/CHANGELOG.md) +- [google-cloud-service-usage==1.11.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-service-usage/CHANGELOG.md) +- 
[google-cloud-servicehealth==0.1.7](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-servicehealth/CHANGELOG.md) +- [google-cloud-shell==1.10.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-shell/CHANGELOG.md) +- [google-cloud-source-context==1.6.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-source-context/CHANGELOG.md) +- [google-cloud-speech==2.28.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-speech/CHANGELOG.md) +- [google-cloud-storage-control==1.1.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-storage-control/CHANGELOG.md) +- [google-cloud-storage-transfer==1.13.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-storage-transfer/CHANGELOG.md) +- [google-cloud-storageinsights==0.1.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-storageinsights/CHANGELOG.md) +- [google-cloud-support==0.1.10](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-support/CHANGELOG.md) +- [google-cloud-talent==2.14.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-talent/CHANGELOG.md) +- [google-cloud-tasks==2.17.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-tasks/CHANGELOG.md) +- [google-cloud-telcoautomation==0.2.6](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-telcoautomation/CHANGELOG.md) +- 
[google-cloud-texttospeech==2.21.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-texttospeech/CHANGELOG.md) +- [google-cloud-tpu==1.19.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-tpu/CHANGELOG.md) +- [google-cloud-trace==1.14.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-trace/CHANGELOG.md) +- [google-cloud-translate==3.17.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-translate/CHANGELOG.md) +- [google-cloud-video-live-stream==1.9.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-video-live-stream/CHANGELOG.md) +- [google-cloud-video-stitcher==0.7.13](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-video-stitcher/CHANGELOG.md) +- [google-cloud-video-transcoder==1.13.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-video-transcoder/CHANGELOG.md) +- [google-cloud-videointelligence==2.14.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-videointelligence/CHANGELOG.md) +- [google-cloud-vision==3.8.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-vision/CHANGELOG.md) +- [google-cloud-visionai==0.1.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-visionai/CHANGELOG.md) +- [google-cloud-vm-migration==1.9.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-vm-migration/CHANGELOG.md) +- 
[google-cloud-vmwareengine==1.6.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-vmwareengine/CHANGELOG.md) +- [google-cloud-vpc-access==1.11.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-vpc-access/CHANGELOG.md) +- [google-cloud-webrisk==1.15.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-webrisk/CHANGELOG.md) +- [google-cloud-websecurityscanner==1.15.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-websecurityscanner/CHANGELOG.md) +- [google-cloud-workflows==1.15.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-workflows/CHANGELOG.md) +- [google-cloud-workstations==0.5.9](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-workstations/CHANGELOG.md) +- [google-geo-type==0.3.10](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-geo-type/CHANGELOG.md) +- [google-maps-addressvalidation==0.3.14](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-addressvalidation/CHANGELOG.md) +- [google-maps-areainsights==0.1.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-areainsights/CHANGELOG.md) +- [google-maps-fleetengine-delivery==0.2.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-fleetengine-delivery/CHANGELOG.md) +- [google-maps-fleetengine==0.2.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-fleetengine/CHANGELOG.md) +- 
[google-maps-mapsplatformdatasets==0.4.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-mapsplatformdatasets/CHANGELOG.md) +- [google-maps-places==0.1.19](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-places/CHANGELOG.md) +- [google-maps-routeoptimization==0.1.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-routeoptimization/CHANGELOG.md) +- [google-maps-routing==0.6.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-routing/CHANGELOG.md) +- [google-maps-solar==0.1.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-solar/CHANGELOG.md) +- [google-shopping-css==0.1.9](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-css/CHANGELOG.md) +- [google-shopping-merchant-accounts==0.2.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-merchant-accounts/CHANGELOG.md) +- [google-shopping-merchant-conversions==0.1.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-merchant-conversions/CHANGELOG.md) +- [google-shopping-merchant-datasources==0.1.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-merchant-datasources/CHANGELOG.md) +- [google-shopping-merchant-inventories==0.1.10](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-merchant-inventories/CHANGELOG.md) +- [google-shopping-merchant-lfp==0.1.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-merchant-lfp/CHANGELOG.md) +- 
[google-shopping-merchant-notifications==0.1.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-merchant-notifications/CHANGELOG.md) +- [google-shopping-merchant-products==0.1.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-merchant-products/CHANGELOG.md) +- [google-shopping-merchant-promotions==0.1.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-merchant-promotions/CHANGELOG.md) +- [google-shopping-merchant-quota==0.1.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-merchant-quota/CHANGELOG.md) +- [google-shopping-merchant-reports==0.1.10](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-merchant-reports/CHANGELOG.md) +- [google-shopping-type==0.1.9](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-type/CHANGELOG.md) +- [grafeas==1.12.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/grafeas/CHANGELOG.md) diff --git a/packages/google-ads-admanager/CHANGELOG.md b/packages/google-ads-admanager/CHANGELOG.md index 51bdcd8282ab..c117314d65f7 100644 --- a/packages/google-ads-admanager/CHANGELOG.md +++ b/packages/google-ads-admanager/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [0.2.2](https://github.com/googleapis/google-cloud-python/compare/google-ads-admanager-v0.2.1...google-ads-admanager-v0.2.2) (2024-10-31) + + +### Bug Fixes + +* disable universe-domain validation ([85c7512](https://github.com/googleapis/google-cloud-python/commit/85c7512bbdde2b9cc60b4ad42b8c36c4558a07a5)) + ## [0.2.1](https://github.com/googleapis/google-cloud-python/compare/google-ads-admanager-v0.2.0...google-ads-admanager-v0.2.1) 
(2024-10-24) diff --git a/packages/google-ads-admanager/google/ads/admanager/gapic_version.py b/packages/google-ads-admanager/google/ads/admanager/gapic_version.py index 6d8247638d59..d1a1a883babd 100644 --- a/packages/google-ads-admanager/google/ads/admanager/gapic_version.py +++ b/packages/google-ads-admanager/google/ads/admanager/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.2.1" # {x-release-please-version} +__version__ = "0.2.2" # {x-release-please-version} diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/gapic_version.py b/packages/google-ads-admanager/google/ads/admanager_v1/gapic_version.py index 6d8247638d59..d1a1a883babd 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/gapic_version.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.2.1" # {x-release-please-version} +__version__ = "0.2.2" # {x-release-please-version} diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/ad_unit_service/client.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/ad_unit_service/client.py index 3ba0ee87441f..c33e7ac32dd8 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/ad_unit_service/client.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/ad_unit_service/client.py @@ -508,36 +508,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. 
- - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = AdUnitServiceClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -547,13 +517,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or AdUnitServiceClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. 
+ return True @property def api_endpoint(self): diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/company_service/client.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/company_service/client.py index bb81bb301cfc..2af110bc0a6d 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/company_service/client.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/company_service/client.py @@ -531,36 +531,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = CompanyServiceClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -570,13 +540,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. 
""" - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or CompanyServiceClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_field_service/client.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_field_service/client.py index 804dbce89b34..96798568f55a 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_field_service/client.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_field_service/client.py @@ -475,36 +475,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = CustomFieldServiceClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." 
- ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -514,13 +484,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or CustomFieldServiceClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_key_service/client.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_key_service/client.py index 0610955d547a..dcf8d11e56a6 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_key_service/client.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_key_service/client.py @@ -480,36 +480,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. 
- """ - - default_universe = CustomTargetingKeyServiceClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -519,13 +489,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or CustomTargetingKeyServiceClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_value_service/client.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_value_service/client.py index 062aecd5ee4f..d368fd9f95b7 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_value_service/client.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_value_service/client.py @@ -489,36 +489,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. 
- - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = CustomTargetingValueServiceClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -528,13 +498,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or CustomTargetingValueServiceClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. 
+ return True @property def api_endpoint(self): diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/entity_signals_mapping_service/client.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/entity_signals_mapping_service/client.py index dd85a50da159..072cd036e7f5 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/entity_signals_mapping_service/client.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/entity_signals_mapping_service/client.py @@ -480,36 +480,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = EntitySignalsMappingServiceClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -519,13 +489,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. 
""" - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or EntitySignalsMappingServiceClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/network_service/client.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/network_service/client.py index 1d9ad0661c2d..6d6e51f15d37 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/network_service/client.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/network_service/client.py @@ -468,36 +468,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = NetworkServiceClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." 
- ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -507,13 +477,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or NetworkServiceClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/order_service/client.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/order_service/client.py index db1722c48eb7..3f8f3ca57f7f 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/order_service/client.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/order_service/client.py @@ -582,36 +582,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. 
- """ - - default_universe = OrderServiceClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -621,13 +591,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or OrderServiceClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/placement_service/client.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/placement_service/client.py index 05ef8cb3c56f..3c8735de6913 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/placement_service/client.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/placement_service/client.py @@ -493,36 +493,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. 
- credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = PlacementServiceClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -532,13 +502,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or PlacementServiceClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. 
+ return True @property def api_endpoint(self): diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/report_service/client.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/report_service/client.py index 20d70845a1f4..5ccce8d9cf8a 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/report_service/client.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/report_service/client.py @@ -471,36 +471,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = ReportServiceClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -510,13 +480,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. 
""" - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or ReportServiceClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/role_service/client.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/role_service/client.py index 41e0d8a8d012..12b4ded58fdd 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/role_service/client.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/role_service/client.py @@ -467,36 +467,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = RoleServiceClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." 
- ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -506,13 +476,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or RoleServiceClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/taxonomy_category_service/client.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/taxonomy_category_service/client.py index bd2882549d58..34d2eecbffc5 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/taxonomy_category_service/client.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/taxonomy_category_service/client.py @@ -478,36 +478,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. 
- """ - - default_universe = TaxonomyCategoryServiceClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -517,13 +487,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or TaxonomyCategoryServiceClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/user_service/client.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/user_service/client.py index 40bd220af88c..b8f7c7b12598 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/user_service/client.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/user_service/client.py @@ -468,36 +468,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. 
- credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = UserServiceClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -507,13 +477,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or UserServiceClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. 
+ return True @property def api_endpoint(self): diff --git a/packages/google-ads-admanager/samples/generated_samples/snippet_metadata_google.ads.admanager.v1.json b/packages/google-ads-admanager/samples/generated_samples/snippet_metadata_google.ads.admanager.v1.json index 054c7d9907ed..2480467d0962 100644 --- a/packages/google-ads-admanager/samples/generated_samples/snippet_metadata_google.ads.admanager.v1.json +++ b/packages/google-ads-admanager/samples/generated_samples/snippet_metadata_google.ads.admanager.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-ads-admanager", - "version": "0.2.1" + "version": "0.2.2" }, "snippets": [ { diff --git a/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_ad_unit_service.py b/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_ad_unit_service.py index 1b1919529a2d..8ad32e14d084 100644 --- a/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_ad_unit_service.py +++ b/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_ad_unit_service.py @@ -316,85 +316,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (AdUnitServiceClient, transports.AdUnitServiceRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. 
- channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_company_service.py b/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_company_service.py index 81a94d69c50c..31192a252ba0 100644 --- a/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_company_service.py +++ b/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_company_service.py @@ -320,85 +320,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (CompanyServiceClient, transports.CompanyServiceRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. 
- assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. 
- # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_custom_field_service.py b/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_custom_field_service.py index 7e5bac6fc19a..8277a7391d0e 100644 --- a/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_custom_field_service.py +++ b/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_custom_field_service.py @@ -330,85 +330,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
-@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (CustomFieldServiceClient, transports.CustomFieldServiceRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. 
- client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
- ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_custom_targeting_key_service.py b/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_custom_targeting_key_service.py index 217824f3cc97..c638f5bc671f 100644 --- a/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_custom_targeting_key_service.py +++ b/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_custom_targeting_key_service.py @@ -338,89 +338,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - ( - CustomTargetingKeyServiceClient, - transports.CustomTargetingKeyServiceRestTransport, - "rest", - ), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. 
- channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_custom_targeting_value_service.py b/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_custom_targeting_value_service.py index a9168197e4e1..3364cf5b4339 100644 --- a/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_custom_targeting_value_service.py +++ b/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_custom_targeting_value_service.py @@ -342,89 +342,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
-@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - ( - CustomTargetingValueServiceClient, - transports.CustomTargetingValueServiceRestTransport, - "rest", - ), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. 
- client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
- ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_entity_signals_mapping_service.py b/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_entity_signals_mapping_service.py index 744b2a57595e..6ac3e48f7132 100644 --- a/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_entity_signals_mapping_service.py +++ b/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_entity_signals_mapping_service.py @@ -342,89 +342,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - ( - EntitySignalsMappingServiceClient, - transports.EntitySignalsMappingServiceRestTransport, - "rest", - ), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. 
- channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_network_service.py b/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_network_service.py index efbe6d4b9126..d81d1d2daacf 100644 --- a/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_network_service.py +++ b/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_network_service.py @@ -312,85 +312,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (NetworkServiceClient, transports.NetworkServiceRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. 
- assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. 
- # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_order_service.py b/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_order_service.py index a9725a81786f..ff0845f7b8c7 100644 --- a/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_order_service.py +++ b/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_order_service.py @@ -305,85 +305,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
-@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (OrderServiceClient, transports.OrderServiceRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. 
- client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
- ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_placement_service.py b/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_placement_service.py index 4cfcdf331f0c..e5ccc44d6400 100644 --- a/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_placement_service.py +++ b/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_placement_service.py @@ -319,85 +319,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (PlacementServiceClient, transports.PlacementServiceRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. 
- channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_report_service.py b/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_report_service.py index f4e6569c5321..de1573b91071 100644 --- a/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_report_service.py +++ b/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_report_service.py @@ -323,85 +323,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (ReportServiceClient, transports.ReportServiceRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. 
- assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. 
- # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_role_service.py b/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_role_service.py index 5e624b9b8992..a47b868d77ca 100644 --- a/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_role_service.py +++ b/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_role_service.py @@ -294,85 +294,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
-@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (RoleServiceClient, transports.RoleServiceRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. 
- client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
- ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_taxonomy_category_service.py b/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_taxonomy_category_service.py index a4c067ca6e81..c27e580f33f3 100644 --- a/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_taxonomy_category_service.py +++ b/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_taxonomy_category_service.py @@ -334,89 +334,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - ( - TaxonomyCategoryServiceClient, - transports.TaxonomyCategoryServiceRestTransport, - "rest", - ), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. 
- channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_user_service.py b/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_user_service.py index d3f850d8228e..c90a6aafca07 100644 --- a/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_user_service.py +++ b/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_user_service.py @@ -290,85 +290,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (UserServiceClient, transports.UserServiceRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. 
- assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. 
- # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-ads-marketingplatform-admin/CHANGELOG.md b/packages/google-ads-marketingplatform-admin/CHANGELOG.md index 08562be507e8..c93e09f63da1 100644 --- a/packages/google-ads-marketingplatform-admin/CHANGELOG.md +++ b/packages/google-ads-marketingplatform-admin/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [0.1.2](https://github.com/googleapis/google-cloud-python/compare/google-ads-marketingplatform-admin-v0.1.1...google-ads-marketingplatform-admin-v0.1.2) (2024-11-11) + + +### Bug Fixes + +* disable universe-domain validation ([#13242](https://github.com/googleapis/google-cloud-python/issues/13242)) ([b479ff8](https://github.com/googleapis/google-cloud-python/commit/b479ff841ed93a18393a188ee1d72edf9fb729ec)) + ## 
[0.1.1](https://github.com/googleapis/google-cloud-python/compare/google-ads-marketingplatform-admin-v0.1.0...google-ads-marketingplatform-admin-v0.1.1) (2024-10-24) diff --git a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin/gapic_version.py b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin/gapic_version.py index 0c7cc68730c4..3b0a9d9a8d43 100644 --- a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin/gapic_version.py +++ b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.1" # {x-release-please-version} +__version__ = "0.1.2" # {x-release-please-version} diff --git a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/gapic_version.py b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/gapic_version.py index 0c7cc68730c4..3b0a9d9a8d43 100644 --- a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/gapic_version.py +++ b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.1.1" # {x-release-please-version} +__version__ = "0.1.2" # {x-release-please-version} diff --git a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/client.py b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/client.py index f31761153ab6..5128a184339f 100644 --- a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/client.py +++ b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/client.py @@ -517,36 +517,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = MarketingplatformAdminServiceClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." 
- ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -556,13 +526,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or MarketingplatformAdminServiceClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-ads-marketingplatform-admin/samples/generated_samples/snippet_metadata_google.marketingplatform.admin.v1alpha.json b/packages/google-ads-marketingplatform-admin/samples/generated_samples/snippet_metadata_google.marketingplatform.admin.v1alpha.json index 30ef02019efe..7e4cf53f136e 100644 --- a/packages/google-ads-marketingplatform-admin/samples/generated_samples/snippet_metadata_google.marketingplatform.admin.v1alpha.json +++ b/packages/google-ads-marketingplatform-admin/samples/generated_samples/snippet_metadata_google.marketingplatform.admin.v1alpha.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-ads-marketingplatform-admin", - "version": "0.1.1" + "version": "0.1.2" }, "snippets": [ { diff --git a/packages/google-ads-marketingplatform-admin/tests/unit/gapic/marketingplatform_admin_v1alpha/test_marketingplatform_admin_service.py b/packages/google-ads-marketingplatform-admin/tests/unit/gapic/marketingplatform_admin_v1alpha/test_marketingplatform_admin_service.py index 40b9d151000f..96938b18f7ab 100644 --- a/packages/google-ads-marketingplatform-admin/tests/unit/gapic/marketingplatform_admin_v1alpha/test_marketingplatform_admin_service.py +++ b/packages/google-ads-marketingplatform-admin/tests/unit/gapic/marketingplatform_admin_v1alpha/test_marketingplatform_admin_service.py @@ -350,94 +350,6 @@ def 
test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - ( - MarketingplatformAdminServiceClient, - transports.MarketingplatformAdminServiceGrpcTransport, - "grpc", - ), - ( - MarketingplatformAdminServiceClient, - transports.MarketingplatformAdminServiceRestTransport, - "rest", - ), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. 
- google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
- ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-ai-generativelanguage/CHANGELOG.md b/packages/google-ai-generativelanguage/CHANGELOG.md index 86db3cb6e4e5..19b681ab6681 100644 --- a/packages/google-ai-generativelanguage/CHANGELOG.md +++ b/packages/google-ai-generativelanguage/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [0.6.12](https://github.com/googleapis/google-cloud-python/compare/google-ai-generativelanguage-v0.6.11...google-ai-generativelanguage-v0.6.12) (2024-10-31) + + +### Bug Fixes + +* disable universe-domain validation ([85c7512](https://github.com/googleapis/google-cloud-python/commit/85c7512bbdde2b9cc60b4ad42b8c36c4558a07a5)) + ## [0.6.11](https://github.com/googleapis/google-cloud-python/compare/google-ai-generativelanguage-v0.6.10...google-ai-generativelanguage-v0.6.11) (2024-10-24) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage/gapic_version.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage/gapic_version.py index 02b228845902..44e5c049e336 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage/gapic_version.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.6.11" # {x-release-please-version} +__version__ = "0.6.12" # {x-release-please-version} diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/gapic_version.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/gapic_version.py index 02b228845902..44e5c049e336 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/gapic_version.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.6.11" # {x-release-please-version} +__version__ = "0.6.12" # {x-release-please-version} diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/generative_service/client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/generative_service/client.py index 8df3c91628d2..3194a369d1f6 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/generative_service/client.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/generative_service/client.py @@ -460,36 +460,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. 
- """ - - default_universe = GenerativeServiceClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -499,13 +469,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or GenerativeServiceClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/model_service/client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/model_service/client.py index 5334e45117bc..18edd759c7e4 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/model_service/client.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/model_service/client.py @@ -456,36 +456,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. 
- credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = ModelServiceClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -495,13 +465,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or ModelServiceClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/gapic_version.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/gapic_version.py index 02b228845902..44e5c049e336 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/gapic_version.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.6.11" # {x-release-please-version} +__version__ = "0.6.12" # {x-release-please-version} diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/cache_service/client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/cache_service/client.py index e5ce080ddfcb..1aa7d84159e5 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/cache_service/client.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/cache_service/client.py @@ -482,36 +482,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = CacheServiceClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. 
@@ -521,13 +491,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or CacheServiceClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/discuss_service/client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/discuss_service/client.py index 3e8160f99d4d..656540db3a94 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/discuss_service/client.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/discuss_service/client.py @@ -459,36 +459,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. 
- """ - - default_universe = DiscussServiceClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -498,13 +468,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or DiscussServiceClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/file_service/client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/file_service/client.py index 87de8e15bc6d..15732a3754a0 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/file_service/client.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/file_service/client.py @@ -456,36 +456,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. 
- credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = FileServiceClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -495,13 +465,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or FileServiceClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. 
+ return True @property def api_endpoint(self): diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/generative_service/client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/generative_service/client.py index 64141f79649a..ab10a4cf4406 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/generative_service/client.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/generative_service/client.py @@ -475,36 +475,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = GenerativeServiceClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -514,13 +484,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. 
""" - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or GenerativeServiceClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/model_service/client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/model_service/client.py index c35b6b3c2168..6bde8dc8aad9 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/model_service/client.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/model_service/client.py @@ -477,36 +477,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = ModelServiceClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). 
" - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -516,13 +486,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or ModelServiceClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/permission_service/client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/permission_service/client.py index 61db870f8cb2..fa1ec2eb1fc8 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/permission_service/client.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/permission_service/client.py @@ -465,36 +465,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. 
- """ - - default_universe = PermissionServiceClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -504,13 +474,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or PermissionServiceClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/client.py index 48736239098d..2feee7d907b6 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/client.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/client.py @@ -456,36 +456,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. 
- - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = PredictionServiceClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -495,13 +465,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or PredictionServiceClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. 
+ return True @property def api_endpoint(self): diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/retriever_service/client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/retriever_service/client.py index 2efc8f181dd1..ba7e81a48515 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/retriever_service/client.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/retriever_service/client.py @@ -499,36 +499,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = RetrieverServiceClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -538,13 +508,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. 
""" - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or RetrieverServiceClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/text_service/client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/text_service/client.py index 37c0ff946c1e..1cb5aa54494e 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/text_service/client.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/text_service/client.py @@ -457,36 +457,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = TextServiceClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). 
" - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -496,13 +466,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or TextServiceClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/gapic_version.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/gapic_version.py index 02b228845902..44e5c049e336 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/gapic_version.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.6.11" # {x-release-please-version} +__version__ = "0.6.12" # {x-release-please-version} diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/discuss_service/client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/discuss_service/client.py index 18bbc6108ddd..76d71583a294 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/discuss_service/client.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/discuss_service/client.py @@ -457,36 +457,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = DiscussServiceClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. 
@@ -496,13 +466,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or DiscussServiceClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/model_service/client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/model_service/client.py index b4c965681988..ded53d4a1793 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/model_service/client.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/model_service/client.py @@ -454,36 +454,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. 
- """ - - default_universe = ModelServiceClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -493,13 +463,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or ModelServiceClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/text_service/client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/text_service/client.py index 74a4d9424c53..9663add6a459 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/text_service/client.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/text_service/client.py @@ -455,36 +455,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. 
- credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = TextServiceClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -494,13 +464,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or TextServiceClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/gapic_version.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/gapic_version.py index 02b228845902..44e5c049e336 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/gapic_version.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.6.11" # {x-release-please-version} +__version__ = "0.6.12" # {x-release-please-version} diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/discuss_service/client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/discuss_service/client.py index 3f96ccafb73a..9c0f32e7be0a 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/discuss_service/client.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/discuss_service/client.py @@ -459,36 +459,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = DiscussServiceClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. 
@@ -498,13 +468,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or DiscussServiceClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/model_service/client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/model_service/client.py index f76ac868c667..3e4196c6d5d8 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/model_service/client.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/model_service/client.py @@ -477,36 +477,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. 
- """ - - default_universe = ModelServiceClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -516,13 +486,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or ModelServiceClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/permission_service/client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/permission_service/client.py index fbbd86c3e7d5..6d126b1d3c03 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/permission_service/client.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/permission_service/client.py @@ -480,36 +480,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. 
- - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = PermissionServiceClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -519,13 +489,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or PermissionServiceClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. 
+ return True @property def api_endpoint(self): diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/text_service/client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/text_service/client.py index ad257c9d2909..a9ca258b4b31 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/text_service/client.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/text_service/client.py @@ -457,36 +457,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = TextServiceClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -496,13 +466,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. 
""" - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or TextServiceClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1.json b/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1.json index 2a3900f7d9fc..8a2a6c91e868 100644 --- a/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1.json +++ b/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-ai-generativelanguage", - "version": "0.6.11" + "version": "0.6.12" }, "snippets": [ { diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta.json b/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta.json index 92c04dafd1ea..9ec94c204cf3 100644 --- a/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta.json +++ b/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-ai-generativelanguage", - "version": "0.6.11" + "version": "0.6.12" }, "snippets": [ { diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta2.json b/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta2.json index 
bf9329fcee8f..33ea36110ba5 100644 --- a/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta2.json +++ b/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-ai-generativelanguage", - "version": "0.6.11" + "version": "0.6.12" }, "snippets": [ { diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta3.json b/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta3.json index 88a39493c282..e75232ce026a 100644 --- a/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta3.json +++ b/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta3.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-ai-generativelanguage", - "version": "0.6.11" + "version": "0.6.12" }, "snippets": [ { diff --git a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1/test_generative_service.py b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1/test_generative_service.py index 715f4b18c201..52ce2d8fb7da 100644 --- a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1/test_generative_service.py +++ b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1/test_generative_service.py @@ -329,86 +329,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
-@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (GenerativeServiceClient, transports.GenerativeServiceGrpcTransport, "grpc"), - (GenerativeServiceClient, transports.GenerativeServiceRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. 
- google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
- ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1/test_model_service.py b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1/test_model_service.py index 15b7aa9d348c..4321e032b0e9 100644 --- a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1/test_model_service.py +++ b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1/test_model_service.py @@ -304,86 +304,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (ModelServiceClient, transports.ModelServiceGrpcTransport, "grpc"), - (ModelServiceClient, transports.ModelServiceRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. 
Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_cache_service.py b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_cache_service.py index 4da54c2f1ed1..aaf5e8c0a8ef 100644 --- a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_cache_service.py +++ b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_cache_service.py @@ -313,86 +313,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
-@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (CacheServiceClient, transports.CacheServiceGrpcTransport, "grpc"), - (CacheServiceClient, transports.CacheServiceRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. 
- google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
- ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_discuss_service.py b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_discuss_service.py index 5700d8058173..5a22f6fbbf77 100644 --- a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_discuss_service.py +++ b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_discuss_service.py @@ -318,86 +318,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (DiscussServiceClient, transports.DiscussServiceGrpcTransport, "grpc"), - (DiscussServiceClient, transports.DiscussServiceRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. 
Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_file_service.py b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_file_service.py index 4137b47b47c9..fb860e96f676 100644 --- a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_file_service.py +++ b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_file_service.py @@ -304,86 +304,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
-@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (FileServiceClient, transports.FileServiceGrpcTransport, "grpc"), - (FileServiceClient, transports.FileServiceRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. 
- google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
- ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_generative_service.py b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_generative_service.py index b2464bad5159..e5be0ab99b6d 100644 --- a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_generative_service.py +++ b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_generative_service.py @@ -334,86 +334,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (GenerativeServiceClient, transports.GenerativeServiceGrpcTransport, "grpc"), - (GenerativeServiceClient, transports.GenerativeServiceRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. 
Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_model_service.py b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_model_service.py index fadad45162ed..281420e86a16 100644 --- a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_model_service.py +++ b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_model_service.py @@ -317,86 +317,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
-@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (ModelServiceClient, transports.ModelServiceGrpcTransport, "grpc"), - (ModelServiceClient, transports.ModelServiceRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. 
- google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
- ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_permission_service.py b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_permission_service.py index 6940cf61ff48..abb7d4b5b287 100644 --- a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_permission_service.py +++ b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_permission_service.py @@ -331,86 +331,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (PermissionServiceClient, transports.PermissionServiceGrpcTransport, "grpc"), - (PermissionServiceClient, transports.PermissionServiceRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. 
Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_prediction_service.py b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_prediction_service.py index 2553c659f7a1..38d84a9a2b8d 100644 --- a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_prediction_service.py +++ b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_prediction_service.py @@ -328,86 +328,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
-@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (PredictionServiceClient, transports.PredictionServiceGrpcTransport, "grpc"), - (PredictionServiceClient, transports.PredictionServiceRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. 
- google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
- ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_retriever_service.py b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_retriever_service.py index 1a5cf0f47db9..4ba6216c3c8c 100644 --- a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_retriever_service.py +++ b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_retriever_service.py @@ -322,86 +322,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (RetrieverServiceClient, transports.RetrieverServiceGrpcTransport, "grpc"), - (RetrieverServiceClient, transports.RetrieverServiceRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. 
Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_text_service.py b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_text_service.py index 0d4dd8f9b886..0c848a4d1a6e 100644 --- a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_text_service.py +++ b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_text_service.py @@ -299,86 +299,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
-@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (TextServiceClient, transports.TextServiceGrpcTransport, "grpc"), - (TextServiceClient, transports.TextServiceRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. 
- google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
- ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta2/test_discuss_service.py b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta2/test_discuss_service.py index ea09837f70b5..6745f00aecb3 100644 --- a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta2/test_discuss_service.py +++ b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta2/test_discuss_service.py @@ -317,86 +317,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (DiscussServiceClient, transports.DiscussServiceGrpcTransport, "grpc"), - (DiscussServiceClient, transports.DiscussServiceRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. 
Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta2/test_model_service.py b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta2/test_model_service.py index 5a343846e529..eac582efe3d0 100644 --- a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta2/test_model_service.py +++ b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta2/test_model_service.py @@ -303,86 +303,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
-@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (ModelServiceClient, transports.ModelServiceGrpcTransport, "grpc"), - (ModelServiceClient, transports.ModelServiceRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. 
- google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
- ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta2/test_text_service.py b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta2/test_text_service.py index eb6bcfec2536..5dce1f43b446 100644 --- a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta2/test_text_service.py +++ b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta2/test_text_service.py @@ -298,86 +298,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (TextServiceClient, transports.TextServiceGrpcTransport, "grpc"), - (TextServiceClient, transports.TextServiceRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. 
Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta3/test_discuss_service.py b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta3/test_discuss_service.py index bfd0a465e928..0440cb22380c 100644 --- a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta3/test_discuss_service.py +++ b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta3/test_discuss_service.py @@ -318,86 +318,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
-@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (DiscussServiceClient, transports.DiscussServiceGrpcTransport, "grpc"), - (DiscussServiceClient, transports.DiscussServiceRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. 
- google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
- ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta3/test_model_service.py b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta3/test_model_service.py index d0e31fb13ffe..8247ca3c9236 100644 --- a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta3/test_model_service.py +++ b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta3/test_model_service.py @@ -317,86 +317,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (ModelServiceClient, transports.ModelServiceGrpcTransport, "grpc"), - (ModelServiceClient, transports.ModelServiceRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. 
Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta3/test_permission_service.py b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta3/test_permission_service.py index dd7ce80bfffb..749dd92ab0b7 100644 --- a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta3/test_permission_service.py +++ b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta3/test_permission_service.py @@ -331,86 +331,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
-@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (PermissionServiceClient, transports.PermissionServiceGrpcTransport, "grpc"), - (PermissionServiceClient, transports.PermissionServiceRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. 
- google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
- ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta3/test_text_service.py b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta3/test_text_service.py index cf452f7e5433..f5fda3b37e0b 100644 --- a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta3/test_text_service.py +++ b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta3/test_text_service.py @@ -299,86 +299,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (TextServiceClient, transports.TextServiceGrpcTransport, "grpc"), - (TextServiceClient, transports.TextServiceRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. 
Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-analytics-admin/CHANGELOG.md b/packages/google-analytics-admin/CHANGELOG.md index 280a75f58ba5..d0a27b83e4bf 100644 --- a/packages/google-analytics-admin/CHANGELOG.md +++ b/packages/google-analytics-admin/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [0.23.2](https://github.com/googleapis/google-cloud-python/compare/google-analytics-admin-v0.23.1...google-analytics-admin-v0.23.2) (2024-10-31) + + +### Bug Fixes + +* disable universe-domain validation ([85c7512](https://github.com/googleapis/google-cloud-python/commit/85c7512bbdde2b9cc60b4ad42b8c36c4558a07a5)) + ## [0.23.1](https://github.com/googleapis/google-cloud-python/compare/google-analytics-admin-v0.23.0...google-analytics-admin-v0.23.1) (2024-10-24) diff --git a/packages/google-analytics-admin/google/analytics/admin/gapic_version.py b/packages/google-analytics-admin/google/analytics/admin/gapic_version.py index a63d749c3969..ea568fc1a5a7 100644 --- 
a/packages/google-analytics-admin/google/analytics/admin/gapic_version.py +++ b/packages/google-analytics-admin/google/analytics/admin/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.23.1" # {x-release-please-version} +__version__ = "0.23.2" # {x-release-please-version} diff --git a/packages/google-analytics-admin/google/analytics/admin_v1alpha/gapic_version.py b/packages/google-analytics-admin/google/analytics/admin_v1alpha/gapic_version.py index a63d749c3969..ea568fc1a5a7 100644 --- a/packages/google-analytics-admin/google/analytics/admin_v1alpha/gapic_version.py +++ b/packages/google-analytics-admin/google/analytics/admin_v1alpha/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.23.1" # {x-release-please-version} +__version__ = "0.23.2" # {x-release-please-version} diff --git a/packages/google-analytics-admin/google/analytics/admin_v1alpha/services/analytics_admin_service/client.py b/packages/google-analytics-admin/google/analytics/admin_v1alpha/services/analytics_admin_service/client.py index 5ac6b7f05edf..bfc42cfdb936 100644 --- a/packages/google-analytics-admin/google/analytics/admin_v1alpha/services/analytics_admin_service/client.py +++ b/packages/google-analytics-admin/google/analytics/admin_v1alpha/services/analytics_admin_service/client.py @@ -1100,36 +1100,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. 
- credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = AnalyticsAdminServiceClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -1139,13 +1109,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or AnalyticsAdminServiceClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-analytics-admin/google/analytics/admin_v1beta/gapic_version.py b/packages/google-analytics-admin/google/analytics/admin_v1beta/gapic_version.py index a63d749c3969..ea568fc1a5a7 100644 --- a/packages/google-analytics-admin/google/analytics/admin_v1beta/gapic_version.py +++ b/packages/google-analytics-admin/google/analytics/admin_v1beta/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.23.1" # {x-release-please-version} +__version__ = "0.23.2" # {x-release-please-version} diff --git a/packages/google-analytics-admin/google/analytics/admin_v1beta/services/analytics_admin_service/client.py b/packages/google-analytics-admin/google/analytics/admin_v1beta/services/analytics_admin_service/client.py index baf4b1ed050c..990b9511e610 100644 --- a/packages/google-analytics-admin/google/analytics/admin_v1beta/services/analytics_admin_service/client.py +++ b/packages/google-analytics-admin/google/analytics/admin_v1beta/services/analytics_admin_service/client.py @@ -697,36 +697,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = AnalyticsAdminServiceClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. 
@@ -736,13 +706,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or AnalyticsAdminServiceClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-analytics-admin/samples/generated_samples/snippet_metadata_google.analytics.admin.v1beta.json b/packages/google-analytics-admin/samples/generated_samples/snippet_metadata_google.analytics.admin.v1beta.json index bfb7e2f7c846..5141448d6c81 100644 --- a/packages/google-analytics-admin/samples/generated_samples/snippet_metadata_google.analytics.admin.v1beta.json +++ b/packages/google-analytics-admin/samples/generated_samples/snippet_metadata_google.analytics.admin.v1beta.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-analytics-admin", - "version": "0.23.1" + "version": "0.23.2" }, "snippets": [ { diff --git a/packages/google-analytics-admin/tests/unit/gapic/admin_v1alpha/test_analytics_admin_service.py b/packages/google-analytics-admin/tests/unit/gapic/admin_v1alpha/test_analytics_admin_service.py index 622f53317f23..881d7f07eeda 100644 --- a/packages/google-analytics-admin/tests/unit/gapic/admin_v1alpha/test_analytics_admin_service.py +++ b/packages/google-analytics-admin/tests/unit/gapic/admin_v1alpha/test_analytics_admin_service.py @@ -353,94 +353,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
-@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - ( - AnalyticsAdminServiceClient, - transports.AnalyticsAdminServiceGrpcTransport, - "grpc", - ), - ( - AnalyticsAdminServiceClient, - transports.AnalyticsAdminServiceRestTransport, - "rest", - ), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. 
- google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
- ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-analytics-admin/tests/unit/gapic/admin_v1beta/test_analytics_admin_service.py b/packages/google-analytics-admin/tests/unit/gapic/admin_v1beta/test_analytics_admin_service.py index 9919c341e9f4..9b5864916859 100644 --- a/packages/google-analytics-admin/tests/unit/gapic/admin_v1beta/test_analytics_admin_service.py +++ b/packages/google-analytics-admin/tests/unit/gapic/admin_v1beta/test_analytics_admin_service.py @@ -342,94 +342,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - ( - AnalyticsAdminServiceClient, - transports.AnalyticsAdminServiceGrpcTransport, - "grpc", - ), - ( - AnalyticsAdminServiceClient, - transports.AnalyticsAdminServiceRestTransport, - "rest", - ), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. 
Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-analytics-data/CHANGELOG.md b/packages/google-analytics-data/CHANGELOG.md index 0f3b7524c13c..a4e2a9305477 100644 --- a/packages/google-analytics-data/CHANGELOG.md +++ b/packages/google-analytics-data/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [0.18.15](https://github.com/googleapis/google-cloud-python/compare/google-analytics-data-v0.18.14...google-analytics-data-v0.18.15) (2024-10-31) + + +### Bug Fixes + +* disable universe-domain validation ([85c7512](https://github.com/googleapis/google-cloud-python/commit/85c7512bbdde2b9cc60b4ad42b8c36c4558a07a5)) + ## [0.18.14](https://github.com/googleapis/google-cloud-python/compare/google-analytics-data-v0.18.13...google-analytics-data-v0.18.14) (2024-10-24) diff --git a/packages/google-analytics-data/google/analytics/data/gapic_version.py b/packages/google-analytics-data/google/analytics/data/gapic_version.py index e99a91f4ef5b..f9d274b1cc84 100644 --- 
a/packages/google-analytics-data/google/analytics/data/gapic_version.py +++ b/packages/google-analytics-data/google/analytics/data/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.18.14" # {x-release-please-version} +__version__ = "0.18.15" # {x-release-please-version} diff --git a/packages/google-analytics-data/google/analytics/data_v1alpha/gapic_version.py b/packages/google-analytics-data/google/analytics/data_v1alpha/gapic_version.py index e99a91f4ef5b..f9d274b1cc84 100644 --- a/packages/google-analytics-data/google/analytics/data_v1alpha/gapic_version.py +++ b/packages/google-analytics-data/google/analytics/data_v1alpha/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.18.14" # {x-release-please-version} +__version__ = "0.18.15" # {x-release-please-version} diff --git a/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/client.py b/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/client.py index 2b333ae4af4a..4911b5cf8c7e 100644 --- a/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/client.py +++ b/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/client.py @@ -517,36 +517,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. 
- - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = AlphaAnalyticsDataClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -556,13 +526,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or AlphaAnalyticsDataClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-analytics-data/google/analytics/data_v1beta/gapic_version.py b/packages/google-analytics-data/google/analytics/data_v1beta/gapic_version.py index e99a91f4ef5b..f9d274b1cc84 100644 --- a/packages/google-analytics-data/google/analytics/data_v1beta/gapic_version.py +++ b/packages/google-analytics-data/google/analytics/data_v1beta/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.18.14" # {x-release-please-version} +__version__ = "0.18.15" # {x-release-please-version} diff --git a/packages/google-analytics-data/google/analytics/data_v1beta/services/beta_analytics_data/client.py b/packages/google-analytics-data/google/analytics/data_v1beta/services/beta_analytics_data/client.py index 0e5f00491f32..1ab80c89ab8b 100644 --- a/packages/google-analytics-data/google/analytics/data_v1beta/services/beta_analytics_data/client.py +++ b/packages/google-analytics-data/google/analytics/data_v1beta/services/beta_analytics_data/client.py @@ -479,36 +479,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = BetaAnalyticsDataClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -518,13 +488,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. 
""" - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or BetaAnalyticsDataClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-analytics-data/samples/generated_samples/snippet_metadata_google.analytics.data.v1alpha.json b/packages/google-analytics-data/samples/generated_samples/snippet_metadata_google.analytics.data.v1alpha.json index 91daeaf79a0e..0366b1d9f8a8 100644 --- a/packages/google-analytics-data/samples/generated_samples/snippet_metadata_google.analytics.data.v1alpha.json +++ b/packages/google-analytics-data/samples/generated_samples/snippet_metadata_google.analytics.data.v1alpha.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-analytics-data", - "version": "0.18.14" + "version": "0.18.15" }, "snippets": [ { diff --git a/packages/google-analytics-data/samples/generated_samples/snippet_metadata_google.analytics.data.v1beta.json b/packages/google-analytics-data/samples/generated_samples/snippet_metadata_google.analytics.data.v1beta.json index 4fd8e98c09ba..b65d9d8958c0 100644 --- a/packages/google-analytics-data/samples/generated_samples/snippet_metadata_google.analytics.data.v1beta.json +++ b/packages/google-analytics-data/samples/generated_samples/snippet_metadata_google.analytics.data.v1beta.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-analytics-data", - "version": "0.18.14" + "version": "0.18.15" }, "snippets": [ { diff --git a/packages/google-analytics-data/tests/unit/gapic/data_v1alpha/test_alpha_analytics_data.py b/packages/google-analytics-data/tests/unit/gapic/data_v1alpha/test_alpha_analytics_data.py index 39b4b1e0362f..c349785c674b 100644 --- a/packages/google-analytics-data/tests/unit/gapic/data_v1alpha/test_alpha_analytics_data.py +++ 
b/packages/google-analytics-data/tests/unit/gapic/data_v1alpha/test_alpha_analytics_data.py @@ -343,86 +343,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (AlphaAnalyticsDataClient, transports.AlphaAnalyticsDataGrpcTransport, "grpc"), - (AlphaAnalyticsDataClient, transports.AlphaAnalyticsDataRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. 
- google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
- ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-analytics-data/tests/unit/gapic/data_v1beta/test_beta_analytics_data.py b/packages/google-analytics-data/tests/unit/gapic/data_v1beta/test_beta_analytics_data.py index 500148343b0b..c32d40baa2b6 100644 --- a/packages/google-analytics-data/tests/unit/gapic/data_v1beta/test_beta_analytics_data.py +++ b/packages/google-analytics-data/tests/unit/gapic/data_v1beta/test_beta_analytics_data.py @@ -338,86 +338,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (BetaAnalyticsDataClient, transports.BetaAnalyticsDataGrpcTransport, "grpc"), - (BetaAnalyticsDataClient, transports.BetaAnalyticsDataRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. 
- channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-apps-chat/CHANGELOG.md b/packages/google-apps-chat/CHANGELOG.md index b88b195d48a9..7e8bf5b321a0 100644 --- a/packages/google-apps-chat/CHANGELOG.md +++ b/packages/google-apps-chat/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [0.1.14](https://github.com/googleapis/google-cloud-python/compare/google-apps-chat-v0.1.13...google-apps-chat-v0.1.14) (2024-10-31) + + +### Bug Fixes + +* disable universe-domain validation ([85c7512](https://github.com/googleapis/google-cloud-python/commit/85c7512bbdde2b9cc60b4ad42b8c36c4558a07a5)) + ## [0.1.13](https://github.com/googleapis/google-cloud-python/compare/google-apps-chat-v0.1.12...google-apps-chat-v0.1.13) (2024-10-24) diff --git a/packages/google-apps-chat/google/apps/chat/gapic_version.py b/packages/google-apps-chat/google/apps/chat/gapic_version.py index 7daf9a1dd221..7a4d810a47da 100644 --- a/packages/google-apps-chat/google/apps/chat/gapic_version.py +++ 
b/packages/google-apps-chat/google/apps/chat/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.13" # {x-release-please-version} +__version__ = "0.1.14" # {x-release-please-version} diff --git a/packages/google-apps-chat/google/apps/chat_v1/gapic_version.py b/packages/google-apps-chat/google/apps/chat_v1/gapic_version.py index 7daf9a1dd221..7a4d810a47da 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/gapic_version.py +++ b/packages/google-apps-chat/google/apps/chat_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.13" # {x-release-please-version} +__version__ = "0.1.14" # {x-release-please-version} diff --git a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/client.py b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/client.py index 54e871750f4b..f38740dd5526 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/client.py +++ b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/client.py @@ -654,36 +654,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. 
- """ - - default_universe = ChatServiceClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -693,13 +663,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or ChatServiceClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. 
+ return True @property def api_endpoint(self): diff --git a/packages/google-apps-chat/samples/generated_samples/snippet_metadata_google.chat.v1.json b/packages/google-apps-chat/samples/generated_samples/snippet_metadata_google.chat.v1.json index 8cf1ba757e75..b4693d56a9b2 100644 --- a/packages/google-apps-chat/samples/generated_samples/snippet_metadata_google.chat.v1.json +++ b/packages/google-apps-chat/samples/generated_samples/snippet_metadata_google.chat.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-apps-chat", - "version": "0.1.13" + "version": "0.1.14" }, "snippets": [ { diff --git a/packages/google-apps-chat/tests/unit/gapic/chat_v1/test_chat_service.py b/packages/google-apps-chat/tests/unit/gapic/chat_v1/test_chat_service.py index 3115a124f161..b5c828d4d20d 100644 --- a/packages/google-apps-chat/tests/unit/gapic/chat_v1/test_chat_service.py +++ b/packages/google-apps-chat/tests/unit/gapic/chat_v1/test_chat_service.py @@ -328,86 +328,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (ChatServiceClient, transports.ChatServiceGrpcTransport, "grpc"), - (ChatServiceClient, transports.ChatServiceRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. 
- channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-apps-events-subscriptions/CHANGELOG.md b/packages/google-apps-events-subscriptions/CHANGELOG.md index 37420dec6b4c..69548d98ca25 100644 --- a/packages/google-apps-events-subscriptions/CHANGELOG.md +++ b/packages/google-apps-events-subscriptions/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [0.1.4](https://github.com/googleapis/google-cloud-python/compare/google-apps-events-subscriptions-v0.1.3...google-apps-events-subscriptions-v0.1.4) (2024-10-31) + + +### Bug Fixes + +* disable universe-domain validation ([85c7512](https://github.com/googleapis/google-cloud-python/commit/85c7512bbdde2b9cc60b4ad42b8c36c4558a07a5)) + ## [0.1.3](https://github.com/googleapis/google-cloud-python/compare/google-apps-events-subscriptions-v0.1.2...google-apps-events-subscriptions-v0.1.3) (2024-10-24) diff --git a/packages/google-apps-events-subscriptions/events-subscriptions-v1-py.tar.gz b/packages/google-apps-events-subscriptions/events-subscriptions-v1-py.tar.gz new file 
mode 100644 index 000000000000..e69de29bb2d1 diff --git a/packages/google-apps-events-subscriptions/google/apps/events_subscriptions/gapic_version.py b/packages/google-apps-events-subscriptions/google/apps/events_subscriptions/gapic_version.py index 114e40645800..937ede8823ef 100644 --- a/packages/google-apps-events-subscriptions/google/apps/events_subscriptions/gapic_version.py +++ b/packages/google-apps-events-subscriptions/google/apps/events_subscriptions/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.3" # {x-release-please-version} +__version__ = "0.1.4" # {x-release-please-version} diff --git a/packages/google-apps-events-subscriptions/google/apps/events_subscriptions_v1/gapic_version.py b/packages/google-apps-events-subscriptions/google/apps/events_subscriptions_v1/gapic_version.py index 114e40645800..937ede8823ef 100644 --- a/packages/google-apps-events-subscriptions/google/apps/events_subscriptions_v1/gapic_version.py +++ b/packages/google-apps-events-subscriptions/google/apps/events_subscriptions_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.1.3" # {x-release-please-version} +__version__ = "0.1.4" # {x-release-please-version} diff --git a/packages/google-apps-events-subscriptions/google/apps/events_subscriptions_v1/services/subscriptions_service/client.py b/packages/google-apps-events-subscriptions/google/apps/events_subscriptions_v1/services/subscriptions_service/client.py index 9059b3361f64..44bd27225c06 100644 --- a/packages/google-apps-events-subscriptions/google/apps/events_subscriptions_v1/services/subscriptions_service/client.py +++ b/packages/google-apps-events-subscriptions/google/apps/events_subscriptions_v1/services/subscriptions_service/client.py @@ -499,36 +499,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = SubscriptionsServiceClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. 
@@ -538,13 +508,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or SubscriptionsServiceClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-apps-events-subscriptions/samples/generated_samples/snippet_metadata_google.apps.events.subscriptions.v1.json b/packages/google-apps-events-subscriptions/samples/generated_samples/snippet_metadata_google.apps.events.subscriptions.v1.json index 1a915cd4960a..3d32acd71a3d 100644 --- a/packages/google-apps-events-subscriptions/samples/generated_samples/snippet_metadata_google.apps.events.subscriptions.v1.json +++ b/packages/google-apps-events-subscriptions/samples/generated_samples/snippet_metadata_google.apps.events.subscriptions.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-apps-events-subscriptions", - "version": "0.1.3" + "version": "0.1.4" }, "snippets": [ { diff --git a/packages/google-apps-events-subscriptions/tests/unit/gapic/events_subscriptions_v1/test_subscriptions_service.py b/packages/google-apps-events-subscriptions/tests/unit/gapic/events_subscriptions_v1/test_subscriptions_service.py index a4ffe34c1ba0..d1ea54d0926c 100644 --- a/packages/google-apps-events-subscriptions/tests/unit/gapic/events_subscriptions_v1/test_subscriptions_service.py +++ b/packages/google-apps-events-subscriptions/tests/unit/gapic/events_subscriptions_v1/test_subscriptions_service.py @@ -350,94 +350,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
-@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - ( - SubscriptionsServiceClient, - transports.SubscriptionsServiceGrpcTransport, - "grpc", - ), - ( - SubscriptionsServiceClient, - transports.SubscriptionsServiceRestTransport, - "rest", - ), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. 
- google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
- ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-apps-meet/CHANGELOG.md b/packages/google-apps-meet/CHANGELOG.md index 436c26e7100d..d6d442ccb40e 100644 --- a/packages/google-apps-meet/CHANGELOG.md +++ b/packages/google-apps-meet/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [0.1.10](https://github.com/googleapis/google-cloud-python/compare/google-apps-meet-v0.1.9...google-apps-meet-v0.1.10) (2024-10-31) + + +### Bug Fixes + +* disable universe-domain validation ([85c7512](https://github.com/googleapis/google-cloud-python/commit/85c7512bbdde2b9cc60b4ad42b8c36c4558a07a5)) + ## [0.1.9](https://github.com/googleapis/google-cloud-python/compare/google-apps-meet-v0.1.8...google-apps-meet-v0.1.9) (2024-10-24) diff --git a/packages/google-apps-meet/google/apps/meet/gapic_version.py b/packages/google-apps-meet/google/apps/meet/gapic_version.py index f8ea948a9c30..9413c3341313 100644 --- a/packages/google-apps-meet/google/apps/meet/gapic_version.py +++ b/packages/google-apps-meet/google/apps/meet/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.9" # {x-release-please-version} +__version__ = "0.1.10" # {x-release-please-version} diff --git a/packages/google-apps-meet/google/apps/meet_v2/gapic_version.py b/packages/google-apps-meet/google/apps/meet_v2/gapic_version.py index f8ea948a9c30..9413c3341313 100644 --- a/packages/google-apps-meet/google/apps/meet_v2/gapic_version.py +++ b/packages/google-apps-meet/google/apps/meet_v2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.1.9" # {x-release-please-version} +__version__ = "0.1.10" # {x-release-please-version} diff --git a/packages/google-apps-meet/google/apps/meet_v2/services/conference_records_service/client.py b/packages/google-apps-meet/google/apps/meet_v2/services/conference_records_service/client.py index 4001fe724b57..55f1e5855eab 100644 --- a/packages/google-apps-meet/google/apps/meet_v2/services/conference_records_service/client.py +++ b/packages/google-apps-meet/google/apps/meet_v2/services/conference_records_service/client.py @@ -579,36 +579,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = ConferenceRecordsServiceClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -618,13 +588,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. 
""" - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or ConferenceRecordsServiceClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-apps-meet/google/apps/meet_v2/services/spaces_service/client.py b/packages/google-apps-meet/google/apps/meet_v2/services/spaces_service/client.py index 7a6bed8a44d2..707bba6f8b1f 100644 --- a/packages/google-apps-meet/google/apps/meet_v2/services/spaces_service/client.py +++ b/packages/google-apps-meet/google/apps/meet_v2/services/spaces_service/client.py @@ -468,36 +468,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = SpacesServiceClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." 
- ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -507,13 +477,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or SpacesServiceClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-apps-meet/google/apps/meet_v2beta/gapic_version.py b/packages/google-apps-meet/google/apps/meet_v2beta/gapic_version.py index f8ea948a9c30..9413c3341313 100644 --- a/packages/google-apps-meet/google/apps/meet_v2beta/gapic_version.py +++ b/packages/google-apps-meet/google/apps/meet_v2beta/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.9" # {x-release-please-version} +__version__ = "0.1.10" # {x-release-please-version} diff --git a/packages/google-apps-meet/google/apps/meet_v2beta/services/conference_records_service/client.py b/packages/google-apps-meet/google/apps/meet_v2beta/services/conference_records_service/client.py index 4764e9d20ae9..05b4fac29dda 100644 --- a/packages/google-apps-meet/google/apps/meet_v2beta/services/conference_records_service/client.py +++ b/packages/google-apps-meet/google/apps/meet_v2beta/services/conference_records_service/client.py @@ -579,36 +579,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. 
- - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = ConferenceRecordsServiceClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -618,13 +588,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or ConferenceRecordsServiceClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. 
+ return True @property def api_endpoint(self): diff --git a/packages/google-apps-meet/google/apps/meet_v2beta/services/spaces_service/client.py b/packages/google-apps-meet/google/apps/meet_v2beta/services/spaces_service/client.py index a8c4f208485c..4280b547792d 100644 --- a/packages/google-apps-meet/google/apps/meet_v2beta/services/spaces_service/client.py +++ b/packages/google-apps-meet/google/apps/meet_v2beta/services/spaces_service/client.py @@ -468,36 +468,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = SpacesServiceClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -507,13 +477,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. 
""" - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or SpacesServiceClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-apps-meet/samples/generated_samples/snippet_metadata_google.apps.meet.v2.json b/packages/google-apps-meet/samples/generated_samples/snippet_metadata_google.apps.meet.v2.json index a0252a24c186..7c2d00f3bd03 100644 --- a/packages/google-apps-meet/samples/generated_samples/snippet_metadata_google.apps.meet.v2.json +++ b/packages/google-apps-meet/samples/generated_samples/snippet_metadata_google.apps.meet.v2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-apps-meet", - "version": "0.1.9" + "version": "0.1.10" }, "snippets": [ { diff --git a/packages/google-apps-meet/samples/generated_samples/snippet_metadata_google.apps.meet.v2beta.json b/packages/google-apps-meet/samples/generated_samples/snippet_metadata_google.apps.meet.v2beta.json index f080ac6e008d..035063797335 100644 --- a/packages/google-apps-meet/samples/generated_samples/snippet_metadata_google.apps.meet.v2beta.json +++ b/packages/google-apps-meet/samples/generated_samples/snippet_metadata_google.apps.meet.v2beta.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-apps-meet", - "version": "0.1.9" + "version": "0.1.10" }, "snippets": [ { diff --git a/packages/google-apps-meet/tests/unit/gapic/meet_v2/test_conference_records_service.py b/packages/google-apps-meet/tests/unit/gapic/meet_v2/test_conference_records_service.py index 88328e89b302..3a6c0733060e 100644 --- a/packages/google-apps-meet/tests/unit/gapic/meet_v2/test_conference_records_service.py +++ b/packages/google-apps-meet/tests/unit/gapic/meet_v2/test_conference_records_service.py @@ -336,94 +336,6 @@ def test__get_universe_domain(): assert 
str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - ( - ConferenceRecordsServiceClient, - transports.ConferenceRecordsServiceGrpcTransport, - "grpc", - ), - ( - ConferenceRecordsServiceClient, - transports.ConferenceRecordsServiceRestTransport, - "rest", - ), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. 
- google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
- ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-apps-meet/tests/unit/gapic/meet_v2/test_spaces_service.py b/packages/google-apps-meet/tests/unit/gapic/meet_v2/test_spaces_service.py index cc8817eb4937..d9e07ca663a9 100644 --- a/packages/google-apps-meet/tests/unit/gapic/meet_v2/test_spaces_service.py +++ b/packages/google-apps-meet/tests/unit/gapic/meet_v2/test_spaces_service.py @@ -314,86 +314,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (SpacesServiceClient, transports.SpacesServiceGrpcTransport, "grpc"), - (SpacesServiceClient, transports.SpacesServiceRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. 
- channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-apps-meet/tests/unit/gapic/meet_v2beta/test_conference_records_service.py b/packages/google-apps-meet/tests/unit/gapic/meet_v2beta/test_conference_records_service.py index b95f8ee88f79..d6473b5f7155 100644 --- a/packages/google-apps-meet/tests/unit/gapic/meet_v2beta/test_conference_records_service.py +++ b/packages/google-apps-meet/tests/unit/gapic/meet_v2beta/test_conference_records_service.py @@ -336,94 +336,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
-@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - ( - ConferenceRecordsServiceClient, - transports.ConferenceRecordsServiceGrpcTransport, - "grpc", - ), - ( - ConferenceRecordsServiceClient, - transports.ConferenceRecordsServiceRestTransport, - "rest", - ), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. 
- google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
- ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-apps-meet/tests/unit/gapic/meet_v2beta/test_spaces_service.py b/packages/google-apps-meet/tests/unit/gapic/meet_v2beta/test_spaces_service.py index 1493ba93b11c..810e4251778b 100644 --- a/packages/google-apps-meet/tests/unit/gapic/meet_v2beta/test_spaces_service.py +++ b/packages/google-apps-meet/tests/unit/gapic/meet_v2beta/test_spaces_service.py @@ -314,86 +314,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (SpacesServiceClient, transports.SpacesServiceGrpcTransport, "grpc"), - (SpacesServiceClient, transports.SpacesServiceRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. 
- channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-area120-tables/CHANGELOG.md b/packages/google-area120-tables/CHANGELOG.md index 6b482237128d..fdbb14de13b7 100644 --- a/packages/google-area120-tables/CHANGELOG.md +++ b/packages/google-area120-tables/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [0.11.13](https://github.com/googleapis/google-cloud-python/compare/google-area120-tables-v0.11.12...google-area120-tables-v0.11.13) (2024-10-31) + + +### Bug Fixes + +* disable universe-domain validation ([85c7512](https://github.com/googleapis/google-cloud-python/commit/85c7512bbdde2b9cc60b4ad42b8c36c4558a07a5)) + ## [0.11.12](https://github.com/googleapis/google-cloud-python/compare/google-area120-tables-v0.11.11...google-area120-tables-v0.11.12) (2024-10-24) diff --git a/packages/google-area120-tables/google/area120/tables/gapic_version.py b/packages/google-area120-tables/google/area120/tables/gapic_version.py index 2566b8be8361..082d8f13abe1 100644 --- 
a/packages/google-area120-tables/google/area120/tables/gapic_version.py +++ b/packages/google-area120-tables/google/area120/tables/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.11.12" # {x-release-please-version} +__version__ = "0.11.13" # {x-release-please-version} diff --git a/packages/google-area120-tables/google/area120/tables_v1alpha1/gapic_version.py b/packages/google-area120-tables/google/area120/tables_v1alpha1/gapic_version.py index 2566b8be8361..082d8f13abe1 100644 --- a/packages/google-area120-tables/google/area120/tables_v1alpha1/gapic_version.py +++ b/packages/google-area120-tables/google/area120/tables_v1alpha1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.11.12" # {x-release-please-version} +__version__ = "0.11.13" # {x-release-please-version} diff --git a/packages/google-area120-tables/google/area120/tables_v1alpha1/services/tables_service/client.py b/packages/google-area120-tables/google/area120/tables_v1alpha1/services/tables_service/client.py index 871e6857384e..3da7cb1d546b 100644 --- a/packages/google-area120-tables/google/area120/tables_v1alpha1/services/tables_service/client.py +++ b/packages/google-area120-tables/google/area120/tables_v1alpha1/services/tables_service/client.py @@ -500,36 +500,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. 
- - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = TablesServiceClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -539,13 +509,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or TablesServiceClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. 
+ return True @property def api_endpoint(self): diff --git a/packages/google-area120-tables/samples/generated_samples/snippet_metadata_google.area120.tables.v1alpha1.json b/packages/google-area120-tables/samples/generated_samples/snippet_metadata_google.area120.tables.v1alpha1.json index 7f849d31c21e..a436ed8a890c 100644 --- a/packages/google-area120-tables/samples/generated_samples/snippet_metadata_google.area120.tables.v1alpha1.json +++ b/packages/google-area120-tables/samples/generated_samples/snippet_metadata_google.area120.tables.v1alpha1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-area120-tables", - "version": "0.11.12" + "version": "0.11.13" }, "snippets": [ { diff --git a/packages/google-area120-tables/tests/unit/gapic/tables_v1alpha1/test_tables_service.py b/packages/google-area120-tables/tests/unit/gapic/tables_v1alpha1/test_tables_service.py index 4f0221b25d68..d1015f86f246 100644 --- a/packages/google-area120-tables/tests/unit/gapic/tables_v1alpha1/test_tables_service.py +++ b/packages/google-area120-tables/tests/unit/gapic/tables_v1alpha1/test_tables_service.py @@ -316,86 +316,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (TablesServiceClient, transports.TablesServiceGrpcTransport, "grpc"), - (TablesServiceClient, transports.TablesServiceRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. 
- channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-access-approval/CHANGELOG.md b/packages/google-cloud-access-approval/CHANGELOG.md index 1bbd3c69cb10..bc1996c7e54d 100644 --- a/packages/google-cloud-access-approval/CHANGELOG.md +++ b/packages/google-cloud-access-approval/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [1.14.1](https://github.com/googleapis/google-cloud-python/compare/google-cloud-access-approval-v1.14.0...google-cloud-access-approval-v1.14.1) (2024-10-31) + + +### Bug Fixes + +* disable universe-domain validation ([85c7512](https://github.com/googleapis/google-cloud-python/commit/85c7512bbdde2b9cc60b4ad42b8c36c4558a07a5)) + ## [1.14.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-access-approval-v1.13.5...google-cloud-access-approval-v1.14.0) (2024-10-24) diff --git a/packages/google-cloud-access-approval/google/cloud/accessapproval/gapic_version.py b/packages/google-cloud-access-approval/google/cloud/accessapproval/gapic_version.py index 
2159c8af6f8e..231f5cf041ff 100644 --- a/packages/google-cloud-access-approval/google/cloud/accessapproval/gapic_version.py +++ b/packages/google-cloud-access-approval/google/cloud/accessapproval/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.14.0" # {x-release-please-version} +__version__ = "1.14.1" # {x-release-please-version} diff --git a/packages/google-cloud-access-approval/google/cloud/accessapproval_v1/gapic_version.py b/packages/google-cloud-access-approval/google/cloud/accessapproval_v1/gapic_version.py index 2159c8af6f8e..231f5cf041ff 100644 --- a/packages/google-cloud-access-approval/google/cloud/accessapproval_v1/gapic_version.py +++ b/packages/google-cloud-access-approval/google/cloud/accessapproval_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.14.0" # {x-release-please-version} +__version__ = "1.14.1" # {x-release-please-version} diff --git a/packages/google-cloud-access-approval/google/cloud/accessapproval_v1/services/access_approval/client.py b/packages/google-cloud-access-approval/google/cloud/accessapproval_v1/services/access_approval/client.py index 3a134b51689f..73acca6528d2 100644 --- a/packages/google-cloud-access-approval/google/cloud/accessapproval_v1/services/access_approval/client.py +++ b/packages/google-cloud-access-approval/google/cloud/accessapproval_v1/services/access_approval/client.py @@ -527,36 +527,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. 
- credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = AccessApprovalClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -566,13 +536,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or AccessApprovalClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. 
+ return True @property def api_endpoint(self): diff --git a/packages/google-cloud-access-approval/samples/generated_samples/snippet_metadata_google.cloud.accessapproval.v1.json b/packages/google-cloud-access-approval/samples/generated_samples/snippet_metadata_google.cloud.accessapproval.v1.json index cacb025281c3..2daa4d5daa12 100644 --- a/packages/google-cloud-access-approval/samples/generated_samples/snippet_metadata_google.cloud.accessapproval.v1.json +++ b/packages/google-cloud-access-approval/samples/generated_samples/snippet_metadata_google.cloud.accessapproval.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-access-approval", - "version": "1.14.0" + "version": "1.14.1" }, "snippets": [ { diff --git a/packages/google-cloud-access-approval/tests/unit/gapic/accessapproval_v1/test_access_approval.py b/packages/google-cloud-access-approval/tests/unit/gapic/accessapproval_v1/test_access_approval.py index 9879fc9e98e7..47aabbb3b84e 100644 --- a/packages/google-cloud-access-approval/tests/unit/gapic/accessapproval_v1/test_access_approval.py +++ b/packages/google-cloud-access-approval/tests/unit/gapic/accessapproval_v1/test_access_approval.py @@ -320,86 +320,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (AccessApprovalClient, transports.AccessApprovalGrpcTransport, "grpc"), - (AccessApprovalClient, transports.AccessApprovalRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. 
- assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. 
- # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-advisorynotifications/CHANGELOG.md b/packages/google-cloud-advisorynotifications/CHANGELOG.md index 9ad74ede8fa7..3110e03f654c 100644 --- a/packages/google-cloud-advisorynotifications/CHANGELOG.md +++ b/packages/google-cloud-advisorynotifications/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [0.3.12](https://github.com/googleapis/google-cloud-python/compare/google-cloud-advisorynotifications-v0.3.11...google-cloud-advisorynotifications-v0.3.12) (2024-10-31) + + +### Bug Fixes + +* disable universe-domain validation ([85c7512](https://github.com/googleapis/google-cloud-python/commit/85c7512bbdde2b9cc60b4ad42b8c36c4558a07a5)) + ## [0.3.11](https://github.com/googleapis/google-cloud-python/compare/google-cloud-advisorynotifications-v0.3.10...google-cloud-advisorynotifications-v0.3.11) (2024-10-24) diff --git 
a/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications/gapic_version.py b/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications/gapic_version.py index 075108786e34..ab68833be4be 100644 --- a/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications/gapic_version.py +++ b/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.3.11" # {x-release-please-version} +__version__ = "0.3.12" # {x-release-please-version} diff --git a/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/gapic_version.py b/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/gapic_version.py index 075108786e34..ab68833be4be 100644 --- a/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/gapic_version.py +++ b/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.3.11" # {x-release-please-version} +__version__ = "0.3.12" # {x-release-please-version} diff --git a/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/services/advisory_notifications_service/client.py b/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/services/advisory_notifications_service/client.py index 1596808d73f2..9c15dcb41216 100644 --- a/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/services/advisory_notifications_service/client.py +++ b/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/services/advisory_notifications_service/client.py @@ -491,36 +491,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = AdvisoryNotificationsServiceClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." 
- ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -530,13 +500,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or AdvisoryNotificationsServiceClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-advisorynotifications/samples/generated_samples/snippet_metadata_google.cloud.advisorynotifications.v1.json b/packages/google-cloud-advisorynotifications/samples/generated_samples/snippet_metadata_google.cloud.advisorynotifications.v1.json index 932eb864b320..ef66fef02aae 100644 --- a/packages/google-cloud-advisorynotifications/samples/generated_samples/snippet_metadata_google.cloud.advisorynotifications.v1.json +++ b/packages/google-cloud-advisorynotifications/samples/generated_samples/snippet_metadata_google.cloud.advisorynotifications.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-advisorynotifications", - "version": "0.3.11" + "version": "0.3.12" }, "snippets": [ { diff --git a/packages/google-cloud-advisorynotifications/tests/unit/gapic/advisorynotifications_v1/test_advisory_notifications_service.py b/packages/google-cloud-advisorynotifications/tests/unit/gapic/advisorynotifications_v1/test_advisory_notifications_service.py index c581d5978383..d86d398f7521 100644 --- a/packages/google-cloud-advisorynotifications/tests/unit/gapic/advisorynotifications_v1/test_advisory_notifications_service.py +++ b/packages/google-cloud-advisorynotifications/tests/unit/gapic/advisorynotifications_v1/test_advisory_notifications_service.py @@ -346,94 +346,6 @@ def test__get_universe_domain(): assert 
str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - ( - AdvisoryNotificationsServiceClient, - transports.AdvisoryNotificationsServiceGrpcTransport, - "grpc", - ), - ( - AdvisoryNotificationsServiceClient, - transports.AdvisoryNotificationsServiceRestTransport, - "rest", - ), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. 
- google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
- ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-alloydb/CHANGELOG.md b/packages/google-cloud-alloydb/CHANGELOG.md index 8ceb68d9bc24..e078a11be468 100644 --- a/packages/google-cloud-alloydb/CHANGELOG.md +++ b/packages/google-cloud-alloydb/CHANGELOG.md @@ -1,5 +1,44 @@ # Changelog +## [0.4.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-alloydb-v0.3.16...google-cloud-alloydb-v0.4.0) (2024-11-11) + + +### ⚠ BREAKING CHANGES + +* deprecated various PSC instance configuration fields + +### Features + +* add more observability options on the Instance level ([68a04ad](https://github.com/googleapis/google-cloud-python/commit/68a04ad07c42eb9f64861feb55018922be7963da)) +* add new API to execute SQL statements ([68a04ad](https://github.com/googleapis/google-cloud-python/commit/68a04ad07c42eb9f64861feb55018922be7963da)) +* add new API to perform a promotion or switchover on secondary instances ([68a04ad](https://github.com/googleapis/google-cloud-python/commit/68a04ad07c42eb9f64861feb55018922be7963da)) +* add new API to upgrade a cluster ([68a04ad](https://github.com/googleapis/google-cloud-python/commit/68a04ad07c42eb9f64861feb55018922be7963da)) +* add new CloudSQL backup resource ([68a04ad](https://github.com/googleapis/google-cloud-python/commit/68a04ad07c42eb9f64861feb55018922be7963da)) +* add new cluster and instance level configurations to interact with Gemini ([68a04ad](https://github.com/googleapis/google-cloud-python/commit/68a04ad07c42eb9f64861feb55018922be7963da)) +* add new PSC instance configuration setting and output the PSC DNS name 
([68a04ad](https://github.com/googleapis/google-cloud-python/commit/68a04ad07c42eb9f64861feb55018922be7963da)) +* add optional field to keep extra roles on a user if it already exists ([68a04ad](https://github.com/googleapis/google-cloud-python/commit/68a04ad07c42eb9f64861feb55018922be7963da)) +* add support for Free Trials ([68a04ad](https://github.com/googleapis/google-cloud-python/commit/68a04ad07c42eb9f64861feb55018922be7963da)) +* add support to schedule maintenance ([68a04ad](https://github.com/googleapis/google-cloud-python/commit/68a04ad07c42eb9f64861feb55018922be7963da)) +* additional field to set tags on a backup or cluster ([68a04ad](https://github.com/googleapis/google-cloud-python/commit/68a04ad07c42eb9f64861feb55018922be7963da)) +* support for obtaining the public ip addresses of an instance and enabling outbound public ip ([68a04ad](https://github.com/googleapis/google-cloud-python/commit/68a04ad07c42eb9f64861feb55018922be7963da)) + + +### Bug Fixes + +* deprecated various PSC instance configuration fields ([68a04ad](https://github.com/googleapis/google-cloud-python/commit/68a04ad07c42eb9f64861feb55018922be7963da)) + + +### Documentation + +* various typo fixes, correcting the formatting, and clarifications on the request_id and validate_only fields in API requests and on the page_size when listing the database ([68a04ad](https://github.com/googleapis/google-cloud-python/commit/68a04ad07c42eb9f64861feb55018922be7963da)) + +## [0.3.16](https://github.com/googleapis/google-cloud-python/compare/google-cloud-alloydb-v0.3.15...google-cloud-alloydb-v0.3.16) (2024-10-31) + + +### Bug Fixes + +* disable universe-domain validation ([85c7512](https://github.com/googleapis/google-cloud-python/commit/85c7512bbdde2b9cc60b4ad42b8c36c4558a07a5)) + 
## [0.3.15](https://github.com/googleapis/google-cloud-python/compare/google-cloud-alloydb-v0.3.14...google-cloud-alloydb-v0.3.15) (2024-10-24) diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb/gapic_version.py b/packages/google-cloud-alloydb/google/cloud/alloydb/gapic_version.py index 7d28791e7569..386ddb96d97f 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb/gapic_version.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.3.15" # {x-release-please-version} +__version__ = "0.4.0" # {x-release-please-version} diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1/gapic_version.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1/gapic_version.py index 7d28791e7569..386ddb96d97f 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1/gapic_version.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.3.15" # {x-release-please-version} +__version__ = "0.4.0" # {x-release-please-version} diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1/services/alloy_db_admin/client.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1/services/alloy_db_admin/client.py index ac1c5be463ff..6b93cfa2f619 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1/services/alloy_db_admin/client.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1/services/alloy_db_admin/client.py @@ -654,36 +654,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = AlloyDBAdminClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -693,13 +663,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. 
""" - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or AlloyDBAdminClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/__init__.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/__init__.py index 54a740f73f56..188b6c197ff3 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/__init__.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/__init__.py @@ -19,6 +19,9 @@ from .services.alloy_db_admin import AlloyDBAdminAsyncClient, AlloyDBAdminClient +from .types.csql_resources import CloudSQLBackupRunSource +from .types.data_model import SqlResult, SqlResultColumn, SqlResultRow, SqlResultValue +from .types.gemini import GeminiClusterConfig, GeminiInstanceConfig from .types.resources import ( AutomatedBackupPolicy, Backup, @@ -35,8 +38,11 @@ EncryptionInfo, Instance, InstanceView, + MaintenanceSchedule, + MaintenanceUpdatePolicy, MigrationSource, SslConfig, + SubscriptionType, SupportedDatabaseFlag, User, UserPassword, @@ -57,6 +63,9 @@ DeleteClusterRequest, DeleteInstanceRequest, DeleteUserRequest, + ExecuteSqlMetadata, + ExecuteSqlRequest, + ExecuteSqlResponse, FailoverInstanceRequest, GenerateClientCertificateRequest, GenerateClientCertificateResponse, @@ -80,12 +89,17 @@ ListUsersResponse, OperationMetadata, PromoteClusterRequest, + PromoteClusterStatus, RestartInstanceRequest, RestoreClusterRequest, + SwitchoverClusterRequest, UpdateBackupRequest, UpdateClusterRequest, UpdateInstanceRequest, UpdateUserRequest, + UpgradeClusterRequest, + UpgradeClusterResponse, + UpgradeClusterStatus, ) __all__ = ( @@ -98,6 +112,7 @@ "BatchCreateInstancesMetadata", "BatchCreateInstancesRequest", "BatchCreateInstancesResponse", + 
"CloudSQLBackupRunSource", "Cluster", "ClusterView", "ConnectionInfo", @@ -119,7 +134,12 @@ "DeleteUserRequest", "EncryptionConfig", "EncryptionInfo", + "ExecuteSqlMetadata", + "ExecuteSqlRequest", + "ExecuteSqlResponse", "FailoverInstanceRequest", + "GeminiClusterConfig", + "GeminiInstanceConfig", "GenerateClientCertificateRequest", "GenerateClientCertificateResponse", "GetBackupRequest", @@ -142,17 +162,29 @@ "ListSupportedDatabaseFlagsResponse", "ListUsersRequest", "ListUsersResponse", + "MaintenanceSchedule", + "MaintenanceUpdatePolicy", "MigrationSource", "OperationMetadata", "PromoteClusterRequest", + "PromoteClusterStatus", "RestartInstanceRequest", "RestoreClusterRequest", + "SqlResult", + "SqlResultColumn", + "SqlResultRow", + "SqlResultValue", "SslConfig", + "SubscriptionType", "SupportedDatabaseFlag", + "SwitchoverClusterRequest", "UpdateBackupRequest", "UpdateClusterRequest", "UpdateInstanceRequest", "UpdateUserRequest", + "UpgradeClusterRequest", + "UpgradeClusterResponse", + "UpgradeClusterStatus", "User", "UserPassword", ) diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/gapic_metadata.json b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/gapic_metadata.json index 12158725f2f2..e5bd822ac65e 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/gapic_metadata.json +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/gapic_metadata.json @@ -65,6 +65,11 @@ "delete_user" ] }, + "ExecuteSql": { + "methods": [ + "execute_sql" + ] + }, "FailoverInstance": { "methods": [ "failover_instance" @@ -150,6 +155,11 @@ "restore_cluster" ] }, + "SwitchoverCluster": { + "methods": [ + "switchover_cluster" + ] + }, "UpdateBackup": { "methods": [ "update_backup" @@ -169,6 +179,11 @@ "methods": [ "update_user" ] + }, + "UpgradeCluster": { + "methods": [ + "upgrade_cluster" + ] } } }, @@ -230,6 +245,11 @@ "delete_user" ] }, + "ExecuteSql": { + "methods": [ + "execute_sql" + ] + }, "FailoverInstance": { 
"methods": [ "failover_instance" @@ -315,6 +335,11 @@ "restore_cluster" ] }, + "SwitchoverCluster": { + "methods": [ + "switchover_cluster" + ] + }, "UpdateBackup": { "methods": [ "update_backup" @@ -334,6 +359,11 @@ "methods": [ "update_user" ] + }, + "UpgradeCluster": { + "methods": [ + "upgrade_cluster" + ] } } }, @@ -395,6 +425,11 @@ "delete_user" ] }, + "ExecuteSql": { + "methods": [ + "execute_sql" + ] + }, "FailoverInstance": { "methods": [ "failover_instance" @@ -480,6 +515,11 @@ "restore_cluster" ] }, + "SwitchoverCluster": { + "methods": [ + "switchover_cluster" + ] + }, "UpdateBackup": { "methods": [ "update_backup" @@ -499,6 +539,11 @@ "methods": [ "update_user" ] + }, + "UpgradeCluster": { + "methods": [ + "upgrade_cluster" + ] } } } diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/gapic_version.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/gapic_version.py index 7d28791e7569..386ddb96d97f 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/gapic_version.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.3.15" # {x-release-please-version} +__version__ = "0.4.0" # {x-release-please-version} diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/async_client.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/async_client.py index 0f445dd87757..a76ea9d81e7f 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/async_client.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/async_client.py @@ -53,7 +53,13 @@ from google.protobuf import timestamp_pb2 # type: ignore from google.cloud.alloydb_v1alpha.services.alloy_db_admin import pagers -from google.cloud.alloydb_v1alpha.types import resources, service +from google.cloud.alloydb_v1alpha.types import ( + csql_resources, + data_model, + gemini, + resources, + service, +) from .client import AlloyDBAdminClient from .transports.base import DEFAULT_CLIENT_INFO, AlloyDBAdminTransport @@ -806,6 +812,139 @@ async def sample_update_cluster(): # Done; return the response. return response + async def upgrade_cluster( + self, + request: Optional[Union[service.UpgradeClusterRequest, dict]] = None, + *, + name: Optional[str] = None, + version: Optional[resources.DatabaseVersion] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Upgrades a single Cluster. + Imperative only. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1alpha + + async def sample_upgrade_cluster(): + # Create a client + client = alloydb_v1alpha.AlloyDBAdminAsyncClient() + + # Initialize request argument(s) + request = alloydb_v1alpha.UpgradeClusterRequest( + name="name_value", + version="POSTGRES_16", + ) + + # Make the request + operation = client.upgrade_cluster(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.alloydb_v1alpha.types.UpgradeClusterRequest, dict]]): + The request object. Upgrades a cluster. + name (:class:`str`): + Required. The resource name of the + cluster. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + version (:class:`google.cloud.alloydb_v1alpha.types.DatabaseVersion`): + Required. The version the cluster is + going to be upgraded to. + + This corresponds to the ``version`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.alloydb_v1alpha.types.UpgradeClusterResponse` + UpgradeClusterResponse contains the response for upgrade + cluster operation. + + """ + # Create or coerce a protobuf request object. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, version]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.UpgradeClusterRequest): + request = service.UpgradeClusterRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if version is not None: + request.version = version + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.upgrade_cluster + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + service.UpgradeClusterResponse, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + async def delete_cluster( self, request: Optional[Union[service.DeleteClusterRequest, dict]] = None, @@ -1059,6 +1198,132 @@ async def sample_promote_cluster(): # Done; return the response. 
return response + async def switchover_cluster( + self, + request: Optional[Union[service.SwitchoverClusterRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Switches the roles of PRIMARY and SECONDARY clusters + without any data loss. This promotes the SECONDARY + cluster to PRIMARY and sets up the original PRIMARY + cluster to replicate from this newly promoted cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1alpha + + async def sample_switchover_cluster(): + # Create a client + client = alloydb_v1alpha.AlloyDBAdminAsyncClient() + + # Initialize request argument(s) + request = alloydb_v1alpha.SwitchoverClusterRequest( + name="name_value", + ) + + # Make the request + operation = client.switchover_cluster(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.alloydb_v1alpha.types.SwitchoverClusterRequest, dict]]): + The request object. Message for switching over to a + cluster + name (:class:`str`): + Required. The name of the resource. + For the required format, see the comment + on the Cluster.name field + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.alloydb_v1alpha.types.Cluster` A cluster is a collection of regional AlloyDB resources. It can include a + primary instance and one or more read pool instances. + All cluster resources share a storage layer, which + scales as needed. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.SwitchoverClusterRequest): + request = service.SwitchoverClusterRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.switchover_cluster + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + resources.Cluster, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + async def restore_cluster( self, request: Optional[Union[service.RestoreClusterRequest, dict]] = None, @@ -2605,6 +2870,162 @@ async def sample_restart_instance(): # Done; return the response. return response + async def execute_sql( + self, + request: Optional[Union[service.ExecuteSqlRequest, dict]] = None, + *, + instance: Optional[str] = None, + database: Optional[str] = None, + user: Optional[str] = None, + sql_statement: Optional[str] = None, + password: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> service.ExecuteSqlResponse: + r"""Executes a SQL statement in a database inside an + AlloyDB instance. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1alpha + + async def sample_execute_sql(): + # Create a client + client = alloydb_v1alpha.AlloyDBAdminAsyncClient() + + # Initialize request argument(s) + request = alloydb_v1alpha.ExecuteSqlRequest( + password="password_value", + instance="instance_value", + database="database_value", + user="user_value", + sql_statement="sql_statement_value", + ) + + # Make the request + response = await client.execute_sql(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.alloydb_v1alpha.types.ExecuteSqlRequest, dict]]): + The request object. Request for ExecuteSql rpc. + instance (:class:`str`): + Required. The instance where the SQL + will be executed. For the required + format, see the comment on the + Instance.name field. + + This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + database (:class:`str`): + Required. Name of the database where the query will be + executed. Note - Value provided should be the same as + expected from ``SELECT current_database();`` and NOT as + a resource reference. + + This corresponds to the ``database`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + user (:class:`str`): + Required. Database user to be used for executing the + SQL. Note - Value provided should be the same as + expected from ``SELECT current_user;`` and NOT as a + resource reference. + + This corresponds to the ``user`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + sql_statement (:class:`str`): + Required. SQL statement to execute on + database. Any valid statement is + permitted, including DDL, DML, DQL + statements. 
+ + This corresponds to the ``sql_statement`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + password (:class:`str`): + Optional. The database native user’s + password. + + This corresponds to the ``password`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.alloydb_v1alpha.types.ExecuteSqlResponse: + Execute a SQL statement response. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([instance, database, user, sql_statement, password]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.ExecuteSqlRequest): + request = service.ExecuteSqlRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if instance is not None: + request.instance = instance + if database is not None: + request.database = database + if user is not None: + request.user = user + if sql_statement is not None: + request.sql_statement = sql_statement + if password is not None: + request.password = password + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._client._transport._wrapped_methods[ + self._client._transport.execute_sql + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("instance", request.instance),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + async def list_backups( self, request: Optional[Union[service.ListBackupsRequest, dict]] = None, diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/client.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/client.py index a53ef8c153df..9881348032e2 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/client.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/client.py @@ -59,7 +59,13 @@ from google.protobuf import timestamp_pb2 # type: ignore from google.cloud.alloydb_v1alpha.services.alloy_db_admin import pagers -from google.cloud.alloydb_v1alpha.types import resources, service +from google.cloud.alloydb_v1alpha.types import ( + csql_resources, + data_model, + gemini, + resources, + service, +) from .transports.base import DEFAULT_CLIENT_INFO, AlloyDBAdminTransport from .transports.grpc import AlloyDBAdminGrpcTransport @@ -654,36 +660,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. 
- credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = AlloyDBAdminClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -693,13 +669,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or AlloyDBAdminClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): @@ -1377,6 +1349,136 @@ def sample_update_cluster(): # Done; return the response. return response + def upgrade_cluster( + self, + request: Optional[Union[service.UpgradeClusterRequest, dict]] = None, + *, + name: Optional[str] = None, + version: Optional[resources.DatabaseVersion] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Upgrades a single Cluster. + Imperative only. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1alpha + + def sample_upgrade_cluster(): + # Create a client + client = alloydb_v1alpha.AlloyDBAdminClient() + + # Initialize request argument(s) + request = alloydb_v1alpha.UpgradeClusterRequest( + name="name_value", + version="POSTGRES_16", + ) + + # Make the request + operation = client.upgrade_cluster(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.alloydb_v1alpha.types.UpgradeClusterRequest, dict]): + The request object. Upgrades a cluster. + name (str): + Required. The resource name of the + cluster. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + version (google.cloud.alloydb_v1alpha.types.DatabaseVersion): + Required. The version the cluster is + going to be upgraded to. + + This corresponds to the ``version`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.alloydb_v1alpha.types.UpgradeClusterResponse` + UpgradeClusterResponse contains the response for upgrade + cluster operation. + + """ + # Create or coerce a protobuf request object. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, version]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.UpgradeClusterRequest): + request = service.UpgradeClusterRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if version is not None: + request.version = version + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.upgrade_cluster] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + service.UpgradeClusterResponse, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + def delete_cluster( self, request: Optional[Union[service.DeleteClusterRequest, dict]] = None, @@ -1624,6 +1726,129 @@ def sample_promote_cluster(): # Done; return the response. 
return response + def switchover_cluster( + self, + request: Optional[Union[service.SwitchoverClusterRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Switches the roles of PRIMARY and SECONDARY clusters + without any data loss. This promotes the SECONDARY + cluster to PRIMARY and sets up the original PRIMARY + cluster to replicate from this newly promoted cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1alpha + + def sample_switchover_cluster(): + # Create a client + client = alloydb_v1alpha.AlloyDBAdminClient() + + # Initialize request argument(s) + request = alloydb_v1alpha.SwitchoverClusterRequest( + name="name_value", + ) + + # Make the request + operation = client.switchover_cluster(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.alloydb_v1alpha.types.SwitchoverClusterRequest, dict]): + The request object. Message for switching over to a + cluster + name (str): + Required. The name of the resource. + For the required format, see the comment + on the Cluster.name field + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.alloydb_v1alpha.types.Cluster` A cluster is a collection of regional AlloyDB resources. It can include a + primary instance and one or more read pool instances. + All cluster resources share a storage layer, which + scales as needed. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.SwitchoverClusterRequest): + request = service.SwitchoverClusterRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.switchover_cluster] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. 
+ response = operation.from_gapic( + response, + self._transport.operations_client, + resources.Cluster, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + def restore_cluster( self, request: Optional[Union[service.RestoreClusterRequest, dict]] = None, @@ -3138,6 +3363,159 @@ def sample_restart_instance(): # Done; return the response. return response + def execute_sql( + self, + request: Optional[Union[service.ExecuteSqlRequest, dict]] = None, + *, + instance: Optional[str] = None, + database: Optional[str] = None, + user: Optional[str] = None, + sql_statement: Optional[str] = None, + password: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> service.ExecuteSqlResponse: + r"""Executes a SQL statement in a database inside an + AlloyDB instance. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1alpha + + def sample_execute_sql(): + # Create a client + client = alloydb_v1alpha.AlloyDBAdminClient() + + # Initialize request argument(s) + request = alloydb_v1alpha.ExecuteSqlRequest( + password="password_value", + instance="instance_value", + database="database_value", + user="user_value", + sql_statement="sql_statement_value", + ) + + # Make the request + response = client.execute_sql(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.alloydb_v1alpha.types.ExecuteSqlRequest, dict]): + The request object. 
Request for ExecuteSql rpc. + instance (str): + Required. The instance where the SQL + will be executed. For the required + format, see the comment on the + Instance.name field. + + This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + database (str): + Required. Name of the database where the query will be + executed. Note - Value provided should be the same as + expected from ``SELECT current_database();`` and NOT as + a resource reference. + + This corresponds to the ``database`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + user (str): + Required. Database user to be used for executing the + SQL. Note - Value provided should be the same as + expected from ``SELECT current_user;`` and NOT as a + resource reference. + + This corresponds to the ``user`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + sql_statement (str): + Required. SQL statement to execute on + database. Any valid statement is + permitted, including DDL, DML, DQL + statements. + + This corresponds to the ``sql_statement`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + password (str): + Optional. The database native user’s + password. + + This corresponds to the ``password`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.alloydb_v1alpha.types.ExecuteSqlResponse: + Execute a SQL statement response. + """ + # Create or coerce a protobuf request object. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([instance, database, user, sql_statement, password]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.ExecuteSqlRequest): + request = service.ExecuteSqlRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if instance is not None: + request.instance = instance + if database is not None: + request.database = database + if user is not None: + request.user = user + if sql_statement is not None: + request.sql_statement = sql_statement + if password is not None: + request.password = password + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.execute_sql] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("instance", request.instance),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + def list_backups( self, request: Optional[Union[service.ListBackupsRequest, dict]] = None, diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/transports/base.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/transports/base.py index df2ea4799505..a05cef12ef42 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/transports/base.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/transports/base.py @@ -171,6 +171,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.upgrade_cluster: gapic_v1.method.wrap_method( + self.upgrade_cluster, + default_timeout=None, + client_info=client_info, + ), self.delete_cluster: gapic_v1.method.wrap_method( self.delete_cluster, default_timeout=None, @@ -181,6 +186,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.switchover_cluster: gapic_v1.method.wrap_method( + self.switchover_cluster, + default_timeout=None, + client_info=client_info, + ), self.restore_cluster: gapic_v1.method.wrap_method( self.restore_cluster, default_timeout=None, @@ -259,6 +269,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.execute_sql: gapic_v1.method.wrap_method( + self.execute_sql, + default_timeout=None, + client_info=client_info, + ), self.list_backups: gapic_v1.method.wrap_method( self.list_backups, default_retry=retries.Retry( @@ -389,16 +404,7 @@ def _prep_wrapped_messages(self, client_info): ), self.list_databases: gapic_v1.method.wrap_method( self.list_databases, - default_retry=retries.Retry( - initial=1.0, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, + default_timeout=None, 
client_info=client_info, ), self.get_location: gapic_v1.method.wrap_method( @@ -483,6 +489,15 @@ def update_cluster( ]: raise NotImplementedError() + @property + def upgrade_cluster( + self, + ) -> Callable[ + [service.UpgradeClusterRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + @property def delete_cluster( self, @@ -501,6 +516,15 @@ def promote_cluster( ]: raise NotImplementedError() + @property + def switchover_cluster( + self, + ) -> Callable[ + [service.SwitchoverClusterRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + @property def restore_cluster( self, @@ -609,6 +633,15 @@ def restart_instance( ]: raise NotImplementedError() + @property + def execute_sql( + self, + ) -> Callable[ + [service.ExecuteSqlRequest], + Union[service.ExecuteSqlResponse, Awaitable[service.ExecuteSqlResponse]], + ]: + raise NotImplementedError() + @property def list_backups( self, diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/transports/grpc.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/transports/grpc.py index fa16dde182e4..5a23f92cecfe 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/transports/grpc.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/transports/grpc.py @@ -357,6 +357,33 @@ def update_cluster( ) return self._stubs["update_cluster"] + @property + def upgrade_cluster( + self, + ) -> Callable[[service.UpgradeClusterRequest], operations_pb2.Operation]: + r"""Return a callable for the upgrade cluster method over gRPC. + + Upgrades a single Cluster. + Imperative only. + + Returns: + Callable[[~.UpgradeClusterRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "upgrade_cluster" not in self._stubs: + self._stubs["upgrade_cluster"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1alpha.AlloyDBAdmin/UpgradeCluster", + request_serializer=service.UpgradeClusterRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["upgrade_cluster"] + @property def delete_cluster( self, @@ -412,6 +439,35 @@ def promote_cluster( ) return self._stubs["promote_cluster"] + @property + def switchover_cluster( + self, + ) -> Callable[[service.SwitchoverClusterRequest], operations_pb2.Operation]: + r"""Return a callable for the switchover cluster method over gRPC. + + Switches the roles of PRIMARY and SECONDARY clusters + without any data loss. This promotes the SECONDARY + cluster to PRIMARY and sets up the original PRIMARY + cluster to replicate from this newly promoted cluster. + + Returns: + Callable[[~.SwitchoverClusterRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "switchover_cluster" not in self._stubs: + self._stubs["switchover_cluster"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1alpha.AlloyDBAdmin/SwitchoverCluster", + request_serializer=service.SwitchoverClusterRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["switchover_cluster"] + @property def restore_cluster( self, @@ -746,6 +802,33 @@ def restart_instance( ) return self._stubs["restart_instance"] + @property + def execute_sql( + self, + ) -> Callable[[service.ExecuteSqlRequest], service.ExecuteSqlResponse]: + r"""Return a callable for the execute sql method over gRPC. + + Executes a SQL statement in a database inside an + AlloyDB instance. + + Returns: + Callable[[~.ExecuteSqlRequest], + ~.ExecuteSqlResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "execute_sql" not in self._stubs: + self._stubs["execute_sql"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1alpha.AlloyDBAdmin/ExecuteSql", + request_serializer=service.ExecuteSqlRequest.serialize, + response_deserializer=service.ExecuteSqlResponse.deserialize, + ) + return self._stubs["execute_sql"] + @property def list_backups( self, diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/transports/grpc_asyncio.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/transports/grpc_asyncio.py index c42bbe04dfb5..ed1893416b12 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/transports/grpc_asyncio.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/transports/grpc_asyncio.py @@ -371,6 +371,33 @@ def update_cluster( ) return self._stubs["update_cluster"] + @property + def upgrade_cluster( + self, + ) -> Callable[[service.UpgradeClusterRequest], Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the upgrade cluster method over gRPC. + + Upgrades a single Cluster. + Imperative only. + + Returns: + Callable[[~.UpgradeClusterRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "upgrade_cluster" not in self._stubs: + self._stubs["upgrade_cluster"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1alpha.AlloyDBAdmin/UpgradeCluster", + request_serializer=service.UpgradeClusterRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["upgrade_cluster"] + @property def delete_cluster( self, @@ -426,6 +453,37 @@ def promote_cluster( ) return self._stubs["promote_cluster"] + @property + def switchover_cluster( + self, + ) -> Callable[ + [service.SwitchoverClusterRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the switchover cluster method over gRPC. + + Switches the roles of PRIMARY and SECONDARY clusters + without any data loss. This promotes the SECONDARY + cluster to PRIMARY and sets up the original PRIMARY + cluster to replicate from this newly promoted cluster. + + Returns: + Callable[[~.SwitchoverClusterRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "switchover_cluster" not in self._stubs: + self._stubs["switchover_cluster"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1alpha.AlloyDBAdmin/SwitchoverCluster", + request_serializer=service.SwitchoverClusterRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["switchover_cluster"] + @property def restore_cluster( self, @@ -772,6 +830,33 @@ def restart_instance( ) return self._stubs["restart_instance"] + @property + def execute_sql( + self, + ) -> Callable[[service.ExecuteSqlRequest], Awaitable[service.ExecuteSqlResponse]]: + r"""Return a callable for the execute sql method over gRPC. + + Executes a SQL statement in a database inside an + AlloyDB instance. 
+ + Returns: + Callable[[~.ExecuteSqlRequest], + Awaitable[~.ExecuteSqlResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "execute_sql" not in self._stubs: + self._stubs["execute_sql"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1alpha.AlloyDBAdmin/ExecuteSql", + request_serializer=service.ExecuteSqlRequest.serialize, + response_deserializer=service.ExecuteSqlResponse.deserialize, + ) + return self._stubs["execute_sql"] + @property def list_backups( self, @@ -1195,6 +1280,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.upgrade_cluster: self._wrap_method( + self.upgrade_cluster, + default_timeout=None, + client_info=client_info, + ), self.delete_cluster: self._wrap_method( self.delete_cluster, default_timeout=None, @@ -1205,6 +1295,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.switchover_cluster: self._wrap_method( + self.switchover_cluster, + default_timeout=None, + client_info=client_info, + ), self.restore_cluster: self._wrap_method( self.restore_cluster, default_timeout=None, @@ -1283,6 +1378,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.execute_sql: self._wrap_method( + self.execute_sql, + default_timeout=None, + client_info=client_info, + ), self.list_backups: self._wrap_method( self.list_backups, default_retry=retries.AsyncRetry( @@ -1413,16 +1513,7 @@ def _prep_wrapped_messages(self, client_info): ), self.list_databases: self._wrap_method( self.list_databases, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - 
), - deadline=60.0, - ), - default_timeout=60.0, + default_timeout=None, client_info=client_info, ), self.get_location: self._wrap_method( diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/transports/rest.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/transports/rest.py index f62b9a024e7a..eb1c9315c093 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/transports/rest.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/transports/rest.py @@ -149,6 +149,14 @@ def pre_delete_user(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata + def pre_execute_sql(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_execute_sql(self, response): + logging.log(f"Received response: {response}") + return response + def pre_failover_instance(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -285,6 +293,14 @@ def post_restore_cluster(self, response): logging.log(f"Received response: {response}") return response + def pre_switchover_cluster(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_switchover_cluster(self, response): + logging.log(f"Received response: {response}") + return response + def pre_update_backup(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -317,6 +333,14 @@ def post_update_user(self, response): logging.log(f"Received response: {response}") return response + def pre_upgrade_cluster(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_upgrade_cluster(self, response): + logging.log(f"Received response: {response}") + return response + transport = 
AlloyDBAdminRestTransport(interceptor=MyCustomAlloyDBAdminInterceptor()) client = AlloyDBAdminClient(transport=transport) @@ -551,6 +575,27 @@ def pre_delete_user( """ return request, metadata + def pre_execute_sql( + self, request: service.ExecuteSqlRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[service.ExecuteSqlRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for execute_sql + + Override in a subclass to manipulate the request or metadata + before they are sent to the AlloyDBAdmin server. + """ + return request, metadata + + def post_execute_sql( + self, response: service.ExecuteSqlResponse + ) -> service.ExecuteSqlResponse: + """Post-rpc interceptor for execute_sql + + Override in a subclass to manipulate the response + after it is returned by the AlloyDBAdmin server but before + it is returned to user code. + """ + return response + def pre_failover_instance( self, request: service.FailoverInstanceRequest, @@ -914,6 +959,29 @@ def post_restore_cluster( """ return response + def pre_switchover_cluster( + self, + request: service.SwitchoverClusterRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[service.SwitchoverClusterRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for switchover_cluster + + Override in a subclass to manipulate the request or metadata + before they are sent to the AlloyDBAdmin server. + """ + return request, metadata + + def post_switchover_cluster( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for switchover_cluster + + Override in a subclass to manipulate the response + after it is returned by the AlloyDBAdmin server but before + it is returned to user code. 
+ """ + return response + def pre_update_backup( self, request: service.UpdateBackupRequest, metadata: Sequence[Tuple[str, str]] ) -> Tuple[service.UpdateBackupRequest, Sequence[Tuple[str, str]]]: @@ -998,6 +1066,29 @@ def post_update_user(self, response: resources.User) -> resources.User: """ return response + def pre_upgrade_cluster( + self, + request: service.UpgradeClusterRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[service.UpgradeClusterRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for upgrade_cluster + + Override in a subclass to manipulate the request or metadata + before they are sent to the AlloyDBAdmin server. + """ + return request, metadata + + def post_upgrade_cluster( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for upgrade_cluster + + Override in a subclass to manipulate the response + after it is returned by the AlloyDBAdmin server but before + it is returned to user code. + """ + return response + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -2305,6 +2396,106 @@ def __call__( if response.status_code >= 400: raise core_exceptions.from_http_response(response) + class _ExecuteSql( + _BaseAlloyDBAdminRestTransport._BaseExecuteSql, AlloyDBAdminRestStub + ): + def __hash__(self): + return hash("AlloyDBAdminRestTransport.ExecuteSql") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: service.ExecuteSqlRequest, + *, + retry: OptionalRetry = 
gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> service.ExecuteSqlResponse: + r"""Call the execute sql method over HTTP. + + Args: + request (~.service.ExecuteSqlRequest): + The request object. Request for ExecuteSql rpc. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.service.ExecuteSqlResponse: + Execute a SQL statement response. + """ + + http_options = ( + _BaseAlloyDBAdminRestTransport._BaseExecuteSql._get_http_options() + ) + request, metadata = self._interceptor.pre_execute_sql(request, metadata) + transcoded_request = ( + _BaseAlloyDBAdminRestTransport._BaseExecuteSql._get_transcoded_request( + http_options, request + ) + ) + + body = ( + _BaseAlloyDBAdminRestTransport._BaseExecuteSql._get_request_body_json( + transcoded_request + ) + ) + + # Jsonify the query params + query_params = ( + _BaseAlloyDBAdminRestTransport._BaseExecuteSql._get_query_params_json( + transcoded_request + ) + ) + + # Send the request + response = AlloyDBAdminRestTransport._ExecuteSql._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = service.ExecuteSqlResponse() + pb_resp = service.ExecuteSqlResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_execute_sql(resp) + return resp + class _FailoverInstance( _BaseAlloyDBAdminRestTransport._BaseFailoverInstance, AlloyDBAdminRestStub ): @@ -3926,6 +4117,104 @@ def __call__( resp = self._interceptor.post_restore_cluster(resp) return resp + class _SwitchoverCluster( + _BaseAlloyDBAdminRestTransport._BaseSwitchoverCluster, AlloyDBAdminRestStub + ): + def __hash__(self): + return hash("AlloyDBAdminRestTransport.SwitchoverCluster") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: service.SwitchoverClusterRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the switchover cluster method over HTTP. + + Args: + request (~.service.SwitchoverClusterRequest): + The request object. Message for switching over to a + cluster + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = ( + _BaseAlloyDBAdminRestTransport._BaseSwitchoverCluster._get_http_options() + ) + request, metadata = self._interceptor.pre_switchover_cluster( + request, metadata + ) + transcoded_request = _BaseAlloyDBAdminRestTransport._BaseSwitchoverCluster._get_transcoded_request( + http_options, request + ) + + body = _BaseAlloyDBAdminRestTransport._BaseSwitchoverCluster._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseAlloyDBAdminRestTransport._BaseSwitchoverCluster._get_query_params_json( + transcoded_request + ) + + # Send the request + response = AlloyDBAdminRestTransport._SwitchoverCluster._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_switchover_cluster(resp) + return resp + class _UpdateBackup( _BaseAlloyDBAdminRestTransport._BaseUpdateBackup, AlloyDBAdminRestStub ): @@ -4315,6 +4604,101 @@ def __call__( resp = self._interceptor.post_update_user(resp) return resp + class _UpgradeCluster( + _BaseAlloyDBAdminRestTransport._BaseUpgradeCluster, AlloyDBAdminRestStub + ): + def __hash__(self): + return hash("AlloyDBAdminRestTransport.UpgradeCluster") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: service.UpgradeClusterRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the upgrade cluster method over HTTP. + + Args: + request (~.service.UpgradeClusterRequest): + The request object. Upgrades a cluster. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options = ( + _BaseAlloyDBAdminRestTransport._BaseUpgradeCluster._get_http_options() + ) + request, metadata = self._interceptor.pre_upgrade_cluster(request, metadata) + transcoded_request = _BaseAlloyDBAdminRestTransport._BaseUpgradeCluster._get_transcoded_request( + http_options, request + ) + + body = _BaseAlloyDBAdminRestTransport._BaseUpgradeCluster._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseAlloyDBAdminRestTransport._BaseUpgradeCluster._get_query_params_json( + transcoded_request + ) + + # Send the request + response = AlloyDBAdminRestTransport._UpgradeCluster._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_upgrade_cluster(resp) + return resp + @property def batch_create_instances( self, @@ -4399,6 +4783,14 @@ def delete_user(self) -> Callable[[service.DeleteUserRequest], empty_pb2.Empty]: # In C++ this would require a dynamic_cast return self._DeleteUser(self._session, self._host, self._interceptor) # type: ignore + @property + def execute_sql( + self, + ) -> Callable[[service.ExecuteSqlRequest], service.ExecuteSqlResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ExecuteSql(self._session, self._host, self._interceptor) # type: ignore + @property def failover_instance( self, @@ -4535,6 +4927,14 @@ def restore_cluster( # In C++ this would require a dynamic_cast return self._RestoreCluster(self._session, self._host, self._interceptor) # type: ignore + @property + def switchover_cluster( + self, + ) -> Callable[[service.SwitchoverClusterRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SwitchoverCluster(self._session, self._host, self._interceptor) # type: ignore + @property def update_backup( self, @@ -4565,6 +4965,14 @@ def update_user(self) -> Callable[[service.UpdateUserRequest], resources.User]: # In C++ this would require a dynamic_cast return self._UpdateUser(self._session, self._host, self._interceptor) # type: ignore + @property + def upgrade_cluster( + self, + ) -> Callable[[service.UpgradeClusterRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._UpgradeCluster(self._session, self._host, self._interceptor) # type: ignore + @property def get_location(self): return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/transports/rest_base.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/transports/rest_base.py index dfcc50d12b0c..10c2b2fd70b6 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/transports/rest_base.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/transports/rest_base.py @@ -691,6 +691,63 @@ def _get_query_params_json(transcoded_request): query_params["$alt"] = "json;enum-encoding=int" return query_params + class _BaseExecuteSql: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha/{instance=projects/*/locations/*/clusters/*/instances/*}:executeSql", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = service.ExecuteSqlRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def 
_get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseAlloyDBAdminRestTransport._BaseExecuteSql._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + class _BaseFailoverInstance: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") @@ -1550,6 +1607,63 @@ def _get_query_params_json(transcoded_request): query_params["$alt"] = "json;enum-encoding=int" return query_params + class _BaseSwitchoverCluster: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha/{name=projects/*/locations/*/clusters/*}:switchover", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = service.SwitchoverClusterRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseAlloyDBAdminRestTransport._BaseSwitchoverCluster._get_unset_required_fields( 
+ query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + class _BaseUpdateBackup: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") @@ -1778,6 +1892,63 @@ def _get_query_params_json(transcoded_request): query_params["$alt"] = "json;enum-encoding=int" return query_params + class _BaseUpgradeCluster: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1alpha/{name=projects/*/locations/*/clusters/*}:upgrade", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = service.UpgradeClusterRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseAlloyDBAdminRestTransport._BaseUpgradeCluster._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + class _BaseGetLocation: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") diff --git 
a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/types/__init__.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/types/__init__.py index 69269be33581..c53192fde938 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/types/__init__.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/types/__init__.py @@ -13,6 +13,9 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from .csql_resources import CloudSQLBackupRunSource +from .data_model import SqlResult, SqlResultColumn, SqlResultRow, SqlResultValue +from .gemini import GeminiClusterConfig, GeminiInstanceConfig from .resources import ( AutomatedBackupPolicy, Backup, @@ -29,8 +32,11 @@ EncryptionInfo, Instance, InstanceView, + MaintenanceSchedule, + MaintenanceUpdatePolicy, MigrationSource, SslConfig, + SubscriptionType, SupportedDatabaseFlag, User, UserPassword, @@ -51,6 +57,9 @@ DeleteClusterRequest, DeleteInstanceRequest, DeleteUserRequest, + ExecuteSqlMetadata, + ExecuteSqlRequest, + ExecuteSqlResponse, FailoverInstanceRequest, GenerateClientCertificateRequest, GenerateClientCertificateResponse, @@ -74,15 +83,27 @@ ListUsersResponse, OperationMetadata, PromoteClusterRequest, + PromoteClusterStatus, RestartInstanceRequest, RestoreClusterRequest, + SwitchoverClusterRequest, UpdateBackupRequest, UpdateClusterRequest, UpdateInstanceRequest, UpdateUserRequest, + UpgradeClusterRequest, + UpgradeClusterResponse, + UpgradeClusterStatus, ) __all__ = ( + "CloudSQLBackupRunSource", + "SqlResult", + "SqlResultColumn", + "SqlResultRow", + "SqlResultValue", + "GeminiClusterConfig", + "GeminiInstanceConfig", "AutomatedBackupPolicy", "Backup", "BackupSource", @@ -95,6 +116,8 @@ "EncryptionConfig", "EncryptionInfo", "Instance", + "MaintenanceSchedule", + "MaintenanceUpdatePolicy", "MigrationSource", "SslConfig", "SupportedDatabaseFlag", @@ -103,6 +126,7 @@ "ClusterView", "DatabaseVersion", "InstanceView", + 
"SubscriptionType", "BatchCreateInstancesMetadata", "BatchCreateInstancesRequest", "BatchCreateInstancesResponse", @@ -118,6 +142,9 @@ "DeleteClusterRequest", "DeleteInstanceRequest", "DeleteUserRequest", + "ExecuteSqlMetadata", + "ExecuteSqlRequest", + "ExecuteSqlResponse", "FailoverInstanceRequest", "GenerateClientCertificateRequest", "GenerateClientCertificateResponse", @@ -141,10 +168,15 @@ "ListUsersResponse", "OperationMetadata", "PromoteClusterRequest", + "PromoteClusterStatus", "RestartInstanceRequest", "RestoreClusterRequest", + "SwitchoverClusterRequest", "UpdateBackupRequest", "UpdateClusterRequest", "UpdateInstanceRequest", "UpdateUserRequest", + "UpgradeClusterRequest", + "UpgradeClusterResponse", + "UpgradeClusterStatus", ) diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/types/csql_resources.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/types/csql_resources.py new file mode 100644 index 000000000000..9c0e0a2c5dce --- /dev/null +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/types/csql_resources.py @@ -0,0 +1,58 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.alloydb.v1alpha", + manifest={ + "CloudSQLBackupRunSource", + }, +) + + +class CloudSQLBackupRunSource(proto.Message): + r"""The source CloudSQL backup resource. + + Attributes: + project (str): + The project ID of the source CloudSQL + instance. This should be the same as the AlloyDB + cluster's project. + instance_id (str): + Required. The CloudSQL instance ID. + backup_run_id (int): + Required. The CloudSQL backup run ID. + """ + + project: str = proto.Field( + proto.STRING, + number=1, + ) + instance_id: str = proto.Field( + proto.STRING, + number=2, + ) + backup_run_id: int = proto.Field( + proto.INT64, + number=3, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/types/data_model.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/types/data_model.py new file mode 100644 index 000000000000..3dbbe07591b1 --- /dev/null +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/types/data_model.py @@ -0,0 +1,125 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.alloydb.v1alpha", + manifest={ + "SqlResult", + "SqlResultColumn", + "SqlResultRow", + "SqlResultValue", + }, +) + + +class SqlResult(proto.Message): + r"""SqlResult represents the result for the execution of a sql + statement. + + Attributes: + columns (MutableSequence[google.cloud.alloydb_v1alpha.types.SqlResultColumn]): + List of columns included in the result. This + also includes the data type of the column. + rows (MutableSequence[google.cloud.alloydb_v1alpha.types.SqlResultRow]): + Rows returned by the SQL statement. + """ + + columns: MutableSequence["SqlResultColumn"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="SqlResultColumn", + ) + rows: MutableSequence["SqlResultRow"] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="SqlResultRow", + ) + + +class SqlResultColumn(proto.Message): + r"""Contains the name and datatype of a column in a SQL Result. + + Attributes: + name (str): + Name of the column. + type_ (str): + Datatype of the column as reported by the + postgres driver. Common type names are + "VARCHAR", "TEXT", "NVARCHAR", "DECIMAL", + "BOOL", "INT", and "BIGINT". + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + type_: str = proto.Field( + proto.STRING, + number=2, + ) + + +class SqlResultRow(proto.Message): + r"""A single row from a sql result. + + Attributes: + values (MutableSequence[google.cloud.alloydb_v1alpha.types.SqlResultValue]): + List of values in a row of sql result. + """ + + values: MutableSequence["SqlResultValue"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="SqlResultValue", + ) + + +class SqlResultValue(proto.Message): + r"""A single value in a row from a sql result. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + value (str): + The cell value represented in string format. + Timestamps are converted to string using + RFC3339Nano format. + + This field is a member of `oneof`_ ``_value``. + null_value (bool): + Set to true if cell value is null. + + This field is a member of `oneof`_ ``_null_value``. + """ + + value: str = proto.Field( + proto.STRING, + number=1, + optional=True, + ) + null_value: bool = proto.Field( + proto.BOOL, + number=2, + optional=True, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/types/gemini.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/types/gemini.py new file mode 100644 index 000000000000..b2508d78eeec --- /dev/null +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/types/gemini.py @@ -0,0 +1,73 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.alloydb.v1alpha", + manifest={ + "GeminiClusterConfig", + "GeminiInstanceConfig", + }, +) + + +class GeminiClusterConfig(proto.Message): + r"""Cluster level configuration parameters related to the Gemini + in Databases add-on. + + Attributes: + entitled (bool): + Output only. Whether the Gemini in Databases + add-on is enabled for the cluster. It will be + true only if the add-on has been enabled for the + billing account corresponding to the cluster. + Its status is toggled from the Admin Control + Center (ACC) and cannot be toggled using + AlloyDB's APIs. + """ + + entitled: bool = proto.Field( + proto.BOOL, + number=1, + ) + + +class GeminiInstanceConfig(proto.Message): + r"""Instance level configuration parameters related to the Gemini + in Databases add-on. + + Attributes: + entitled (bool): + Output only. Whether the Gemini in Databases + add-on is enabled for the instance. It will be + true only if the add-on has been enabled for the + billing account corresponding to the instance. + Its status is toggled from the Admin Control + Center (ACC) and cannot be toggled using + AlloyDB's APIs. 
+ """ + + entitled: bool = proto.Field( + proto.BOOL, + number=1, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/types/resources.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/types/resources.py index 22d401ff11cc..aa4d9d4068bb 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/types/resources.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/types/resources.py @@ -24,12 +24,15 @@ from google.type import timeofday_pb2 # type: ignore import proto # type: ignore +from google.cloud.alloydb_v1alpha.types import csql_resources, gemini + __protobuf__ = proto.module( package="google.cloud.alloydb.v1alpha", manifest={ "InstanceView", "ClusterView", "DatabaseVersion", + "SubscriptionType", "UserPassword", "MigrationSource", "EncryptionConfig", @@ -40,6 +43,8 @@ "ContinuousBackupInfo", "BackupSource", "ContinuousBackupSource", + "MaintenanceUpdatePolicy", + "MaintenanceSchedule", "Cluster", "Instance", "ConnectionInfo", @@ -111,11 +116,33 @@ class DatabaseVersion(proto.Enum): The database version is Postgres 14. POSTGRES_15 (3): The database version is Postgres 15. + POSTGRES_16 (4): + The database version is Postgres 16. """ DATABASE_VERSION_UNSPECIFIED = 0 POSTGRES_13 = 1 POSTGRES_14 = 2 POSTGRES_15 = 3 + POSTGRES_16 = 4 + + +class SubscriptionType(proto.Enum): + r"""Subscription_type added to distinguish between Standard and Trial + subscriptions. By default, a subscription type is considered + STANDARD unless explicitly specified. + + Values: + SUBSCRIPTION_TYPE_UNSPECIFIED (0): + This is an unknown subscription type. By + default, the subscription type is STANDARD. + STANDARD (1): + Standard subscription. + TRIAL (2): + Trial subscription. 
+ """ + SUBSCRIPTION_TYPE_UNSPECIFIED = 0 + STANDARD = 1 + TRIAL = 2 class UserPassword(proto.Message): @@ -264,7 +291,7 @@ class SslMode(proto.Enum): Values: SSL_MODE_UNSPECIFIED (0): - SSL mode not specified. Defaults to ENCRYPTED_ONLY. + SSL mode is not specified. Defaults to ENCRYPTED_ONLY. SSL_MODE_ALLOW (1): SSL connections are optional. CA verification not enforced. @@ -276,7 +303,7 @@ class SslMode(proto.Enum): SSL_MODE_VERIFY_CA (3): SSL connections are required. CA verification enforced. Clients must have certificates signed - by a Cluster CA, e.g. via + by a Cluster CA, for example, using GenerateClientCertificate. ALLOW_UNENCRYPTED_AND_ENCRYPTED (4): SSL connections are optional. CA verification @@ -622,6 +649,69 @@ class ContinuousBackupSource(proto.Message): ) +class MaintenanceUpdatePolicy(proto.Message): + r"""MaintenanceUpdatePolicy defines the policy for system + updates. + + Attributes: + maintenance_windows (MutableSequence[google.cloud.alloydb_v1alpha.types.MaintenanceUpdatePolicy.MaintenanceWindow]): + Preferred windows to perform maintenance. + Currently limited to 1. + """ + + class MaintenanceWindow(proto.Message): + r"""MaintenanceWindow specifies a preferred day and time for + maintenance. + + Attributes: + day (google.type.dayofweek_pb2.DayOfWeek): + Preferred day of the week for maintenance, + e.g. MONDAY, TUESDAY, etc. + start_time (google.type.timeofday_pb2.TimeOfDay): + Preferred time to start the maintenance + operation on the specified day. Maintenance will + start within 1 hour of this time. 
+ """ + + day: dayofweek_pb2.DayOfWeek = proto.Field( + proto.ENUM, + number=1, + enum=dayofweek_pb2.DayOfWeek, + ) + start_time: timeofday_pb2.TimeOfDay = proto.Field( + proto.MESSAGE, + number=2, + message=timeofday_pb2.TimeOfDay, + ) + + maintenance_windows: MutableSequence[MaintenanceWindow] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=MaintenanceWindow, + ) + + +class MaintenanceSchedule(proto.Message): + r"""MaintenanceSchedule stores the maintenance schedule generated + from the MaintenanceUpdatePolicy, once a maintenance rollout is + triggered, if MaintenanceWindow is set, and if there is no + conflicting DenyPeriod. The schedule is cleared once the update + takes place. This field cannot be manually changed; modify the + MaintenanceUpdatePolicy instead. + + Attributes: + start_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The scheduled start time for the + maintenance. + """ + + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + + class Cluster(proto.Message): r"""A cluster is a collection of regional AlloyDB resources. It can include a primary instance and one or more read pool @@ -644,6 +734,11 @@ class Cluster(proto.Message): Output only. Cluster created via DMS migration. + This field is a member of `oneof`_ ``source``. + cloudsql_backup_run_source (google.cloud.alloydb_v1alpha.types.CloudSQLBackupRunSource): + Output only. Cluster created from CloudSQL + snapshot. + This field is a member of `oneof`_ ``source``. name (str): Output only. The name of the cluster resource with the @@ -693,7 +788,7 @@ class Cluster(proto.Message): cluster resources are created and from which they are accessible via Private IP. The network must belong to the same project as the cluster. It is specified in the form: - "projects/{project}/global/networks/{network_id}". This is + ``projects/{project}/global/networks/{network_id}``. This is required to create a cluster. 
Deprecated, use network_config.network instead. etag (str): @@ -757,6 +852,28 @@ class Cluster(proto.Message): psc_config (google.cloud.alloydb_v1alpha.types.Cluster.PscConfig): Optional. The configuration for Private Service Connect (PSC) for the cluster. + maintenance_update_policy (google.cloud.alloydb_v1alpha.types.MaintenanceUpdatePolicy): + Optional. The maintenance update policy + determines when to allow or deny updates. + maintenance_schedule (google.cloud.alloydb_v1alpha.types.MaintenanceSchedule): + Output only. The maintenance schedule for the + cluster, generated for a specific rollout if a + maintenance window is set. + gemini_config (google.cloud.alloydb_v1alpha.types.GeminiClusterConfig): + Optional. Configuration parameters related to + the Gemini in Databases add-on. + subscription_type (google.cloud.alloydb_v1alpha.types.SubscriptionType): + Optional. Subscription type of the cluster. + trial_metadata (google.cloud.alloydb_v1alpha.types.Cluster.TrialMetadata): + Output only. Metadata for free trial clusters + tags (MutableMapping[str, str]): + Optional. Input only. Immutable. Tag keys/values directly + bound to this resource. For example: + + :: + + "123/environment": "production", + "123/costCenter": "marketing". """ class State(proto.Enum): @@ -835,7 +952,7 @@ class NetworkConfig(proto.Message): cluster resources are created and from which they are accessible via Private IP. The network must belong to the same project as the cluster. It is specified in the form: - "projects/{project_number}/global/networks/{network_id}". + ``projects/{project_number}/global/networks/{network_id}``. This is required to create a cluster. allocated_ip_range (str): Optional. Name of the allocated IP range for the private IP @@ -898,12 +1015,57 @@ class PscConfig(proto.Message): Optional. Create an instance that allows connections from Private Service Connect endpoints to the instance. + service_owned_project_number (int): + Output only. 
The project number that needs to + be allowlisted on the network attachment to + enable outbound connectivity. """ psc_enabled: bool = proto.Field( proto.BOOL, number=1, ) + service_owned_project_number: int = proto.Field( + proto.INT64, + number=3, + ) + + class TrialMetadata(proto.Message): + r"""Contains information and all metadata related to TRIAL + clusters. + + Attributes: + start_time (google.protobuf.timestamp_pb2.Timestamp): + start time of the trial cluster. + end_time (google.protobuf.timestamp_pb2.Timestamp): + End time of the trial cluster. + upgrade_time (google.protobuf.timestamp_pb2.Timestamp): + Upgrade time of trial cluster to Standard + cluster. + grace_end_time (google.protobuf.timestamp_pb2.Timestamp): + grace end time of the cluster. + """ + + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + upgrade_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + grace_end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) backup_source: "BackupSource" = proto.Field( proto.MESSAGE, @@ -917,6 +1079,12 @@ class PscConfig(proto.Message): oneof="source", message="MigrationSource", ) + cloudsql_backup_run_source: csql_resources.CloudSQLBackupRunSource = proto.Field( + proto.MESSAGE, + number=42, + oneof="source", + message=csql_resources.CloudSQLBackupRunSource, + ) name: str = proto.Field( proto.STRING, number=1, @@ -1044,6 +1212,36 @@ class PscConfig(proto.Message): number=31, message=PscConfig, ) + maintenance_update_policy: "MaintenanceUpdatePolicy" = proto.Field( + proto.MESSAGE, + number=32, + message="MaintenanceUpdatePolicy", + ) + maintenance_schedule: "MaintenanceSchedule" = proto.Field( + proto.MESSAGE, + number=37, + 
message="MaintenanceSchedule", + ) + gemini_config: gemini.GeminiClusterConfig = proto.Field( + proto.MESSAGE, + number=36, + message=gemini.GeminiClusterConfig, + ) + subscription_type: "SubscriptionType" = proto.Field( + proto.ENUM, + number=38, + enum="SubscriptionType", + ) + trial_metadata: TrialMetadata = proto.Field( + proto.MESSAGE, + number=39, + message=TrialMetadata, + ) + tags: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=41, + ) class Instance(proto.Message): @@ -1132,6 +1330,8 @@ class Instance(proto.Message): PRIMARY instance. query_insights_config (google.cloud.alloydb_v1alpha.types.Instance.QueryInsightsInstanceConfig): Configuration for query insights. + observability_config (google.cloud.alloydb_v1alpha.types.Instance.ObservabilityInstanceConfig): + Configuration for observability. read_pool_config (google.cloud.alloydb_v1alpha.types.Instance.ReadPoolConfig): Read pool instance configuration. This is required if the value of instanceType is READ_POOL. @@ -1177,8 +1377,14 @@ class Instance(proto.Message): Optional. The configuration for Private Service Connect (PSC) for the instance. network_config (google.cloud.alloydb_v1alpha.types.Instance.InstanceNetworkConfig): - Optional. Instance level network + Optional. Instance-level network configuration. + gemini_config (google.cloud.alloydb_v1alpha.types.GeminiInstanceConfig): + Optional. Configuration parameters related to + the Gemini in Databases add-on. + outbound_public_ip_addresses (MutableSequence[str]): + Output only. All outbound public IP addresses + configured for the instance. """ class State(proto.Enum): @@ -1374,6 +1580,114 @@ class QueryInsightsInstanceConfig(proto.Message): optional=True, ) + class ObservabilityInstanceConfig(proto.Message): + r"""Observability Instance specific configuration. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + enabled (bool): + Observability feature status for an instance. + This flag is turned "off" by default. + + This field is a member of `oneof`_ ``_enabled``. + preserve_comments (bool): + Preserve comments in query string for an + instance. This flag is turned "off" by default. + + This field is a member of `oneof`_ ``_preserve_comments``. + track_wait_events (bool): + Track wait events during query execution for + an instance. This flag is turned "on" by default + but tracking is enabled only after observability + enabled flag is also turned on. + + This field is a member of `oneof`_ ``_track_wait_events``. + track_wait_event_types (bool): + Output only. Track wait event types during + query execution for an instance. This flag is + turned "on" by default but tracking is enabled + only after observability enabled flag is also + turned on. This is read-only flag and only + modifiable by producer API. + + This field is a member of `oneof`_ ``_track_wait_event_types``. + max_query_string_length (int): + Query string length. The default value is + 10k. + + This field is a member of `oneof`_ ``_max_query_string_length``. + record_application_tags (bool): + Record application tags for an instance. + This flag is turned "off" by default. + + This field is a member of `oneof`_ ``_record_application_tags``. + query_plans_per_minute (int): + Number of query execution plans captured by + Insights per minute for all queries combined. + The default value is 200. Any integer between 0 + to 200 is considered valid. + + This field is a member of `oneof`_ ``_query_plans_per_minute``. + track_active_queries (bool): + Track actively running queries on the + instance. If not set, this flag is "off" by + default. + + This field is a member of `oneof`_ ``_track_active_queries``. 
+ track_client_address (bool): + Track client address for an instance. + If not set, default value is "off". + + This field is a member of `oneof`_ ``_track_client_address``. + """ + + enabled: bool = proto.Field( + proto.BOOL, + number=1, + optional=True, + ) + preserve_comments: bool = proto.Field( + proto.BOOL, + number=2, + optional=True, + ) + track_wait_events: bool = proto.Field( + proto.BOOL, + number=3, + optional=True, + ) + track_wait_event_types: bool = proto.Field( + proto.BOOL, + number=4, + optional=True, + ) + max_query_string_length: int = proto.Field( + proto.INT32, + number=5, + optional=True, + ) + record_application_tags: bool = proto.Field( + proto.BOOL, + number=6, + optional=True, + ) + query_plans_per_minute: int = proto.Field( + proto.INT32, + number=7, + optional=True, + ) + track_active_queries: bool = proto.Field( + proto.BOOL, + number=8, + optional=True, + ) + track_client_address: bool = proto.Field( + proto.BOOL, + number=9, + optional=True, + ) + class ReadPoolConfig(proto.Message): r"""Configuration for a read pool instance. @@ -1427,7 +1741,7 @@ class ClientConnectionConfig(proto.Message): only (ex: AuthProxy) connections to the database. ssl_config (google.cloud.alloydb_v1alpha.types.SslConfig): - Optional. SSL config option for this + Optional. SSL configuration option for this instance. """ @@ -1442,34 +1756,23 @@ class ClientConnectionConfig(proto.Message): ) class PscInterfaceConfig(proto.Message): - r"""Configuration for setting up a PSC interface. This - information needs to be provided by the customer. - PSC interfaces will be created and added to VMs via SLM (adding - a network interface will require recreating the VM). For HA - instances this will be done via LDTM. + r"""Configuration for setting up a PSC interface to enable + outbound connectivity. Attributes: - consumer_endpoint_ips (MutableSequence[str]): - A list of endpoints in the consumer VPC the - interface might initiate outbound connections - to. 
This list has to be provided when the PSC - interface is created. - network_attachment (str): - The NetworkAttachment resource created in the consumer VPC - to which the PSC interface will be linked, in the form of: + network_attachment_resource (str): + The network attachment resource created in the consumer + network to which the PSC interface will be linked. This is + of the format: "projects/${CONSUMER_PROJECT}/regions/${REGION}/networkAttachments/${NETWORK_ATTACHMENT_NAME}". - NetworkAttachment has to be provided when the PSC interface - is created. + The network attachment must be in the same region as the + instance. """ - consumer_endpoint_ips: MutableSequence[str] = proto.RepeatedField( + network_attachment_resource: str = proto.Field( proto.STRING, number=1, ) - network_attachment: str = proto.Field( - proto.STRING, - number=2, - ) class PscInstanceConfig(proto.Message): r"""PscInstanceConfig contains PSC related configuration at an @@ -1477,36 +1780,24 @@ class PscInstanceConfig(proto.Message): Attributes: service_attachment_link (str): - Output only. The service attachment created - when Private Service Connect (PSC) is enabled - for the instance. The name of the resource will - be in the format of - projects//regions//serviceAttachments/ + Output only. The service attachment created when Private + Service Connect (PSC) is enabled for the instance. The name + of the resource will be in the format of + ``projects//regions//serviceAttachments/`` allowed_consumer_projects (MutableSequence[str]): Optional. List of consumer projects that are allowed to create PSC endpoints to service-attachments to this instance. - allowed_consumer_networks (MutableSequence[str]): - Optional. List of consumer networks that are - allowed to create PSC endpoints to - service-attachments to this instance. + psc_dns_name (str): + Output only. The DNS name of the instance for + PSC connectivity. 
Name convention: + ...alloydb-psc.goog psc_interface_configs (MutableSequence[google.cloud.alloydb_v1alpha.types.Instance.PscInterfaceConfig]): Optional. Configurations for setting up PSC interfaces attached to the instance which are used for outbound connectivity. Only primary - instances can have PSC interface attached. All - the VMs created for the primary instance will - share the same configurations. Currently we only - support 0 or 1 PSC interface. - outgoing_service_attachment_links (MutableSequence[str]): - Optional. List of service attachments that - this instance has created endpoints to connect - with. Currently, only a single outgoing service - attachment is supported per instance. - psc_enabled (bool): - Optional. Whether PSC connectivity is enabled - for this instance. This is populated by - referencing the value from the parent cluster. + instances can have PSC interface attached. + Currently we only support 0 or 1 PSC interface. """ service_attachment_link: str = proto.Field( @@ -1517,28 +1808,20 @@ class PscInstanceConfig(proto.Message): proto.STRING, number=2, ) - allowed_consumer_networks: MutableSequence[str] = proto.RepeatedField( + psc_dns_name: str = proto.Field( proto.STRING, - number=3, + number=7, ) psc_interface_configs: MutableSequence[ "Instance.PscInterfaceConfig" ] = proto.RepeatedField( proto.MESSAGE, - number=4, + number=8, message="Instance.PscInterfaceConfig", ) - outgoing_service_attachment_links: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=5, - ) - psc_enabled: bool = proto.Field( - proto.BOOL, - number=6, - ) class InstanceNetworkConfig(proto.Message): - r"""Metadata related to instance level network configuration. + r"""Metadata related to instance-level network configuration. 
Attributes: authorized_external_networks (MutableSequence[google.cloud.alloydb_v1alpha.types.Instance.InstanceNetworkConfig.AuthorizedNetwork]): @@ -1547,6 +1830,10 @@ class InstanceNetworkConfig(proto.Message): enable_public_ip (bool): Optional. Enabling public ip for the instance. + enable_outbound_public_ip (bool): + Optional. Enabling an outbound public IP + address to support a database server sending + requests out into the internet. """ class AuthorizedNetwork(proto.Message): @@ -1575,6 +1862,10 @@ class AuthorizedNetwork(proto.Message): proto.BOOL, number=2, ) + enable_outbound_public_ip: bool = proto.Field( + proto.BOOL, + number=3, + ) name: str = proto.Field( proto.STRING, @@ -1652,6 +1943,11 @@ class AuthorizedNetwork(proto.Message): number=21, message=QueryInsightsInstanceConfig, ) + observability_config: ObservabilityInstanceConfig = proto.Field( + proto.MESSAGE, + number=26, + message=ObservabilityInstanceConfig, + ) read_pool_config: ReadPoolConfig = proto.Field( proto.MESSAGE, number=14, @@ -1706,6 +2002,15 @@ class AuthorizedNetwork(proto.Message): number=29, message=InstanceNetworkConfig, ) + gemini_config: gemini.GeminiInstanceConfig = proto.Field( + proto.MESSAGE, + number=33, + message=gemini.GeminiInstanceConfig, + ) + outbound_public_ip_addresses: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=34, + ) class ConnectionInfo(proto.Message): @@ -1857,6 +2162,14 @@ class Backup(proto.Message): version of the cluster this backup was created from. Any restored cluster created from this backup will have the same database version. + tags (MutableMapping[str, str]): + Optional. Input only. Immutable. Tag keys/values directly + bound to this resource. For example: + + :: + + "123/environment": "production", + "123/costCenter": "marketing". 
""" class State(proto.Enum): @@ -2042,6 +2355,11 @@ class QuantityBasedExpiry(proto.Message): number=22, enum="DatabaseVersion", ) + tags: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=25, + ) class SupportedDatabaseFlag(proto.Message): @@ -2213,6 +2531,9 @@ class User(proto.Message): the PostgreSQL naming conventions. user_type (google.cloud.alloydb_v1alpha.types.User.UserType): Optional. Type of this user. + keep_extra_roles (bool): + Input only. If the user already exists and it + has additional roles, keep them granted. """ class UserType(proto.Enum): @@ -2249,6 +2570,10 @@ class UserType(proto.Enum): number=5, enum=UserType, ) + keep_extra_roles: bool = proto.Field( + proto.BOOL, + number=6, + ) class Database(proto.Message): @@ -2256,9 +2581,8 @@ class Database(proto.Message): Attributes: name (str): - Identifier. Name of the resource in the form - of - projects/{project}/locations/{location}/clusters/{cluster}/databases/{database}. + Identifier. Name of the resource in the form of + ``projects/{project}/locations/{location}/clusters/{cluster}/databases/{database}``. charset (str): Optional. Charset for the database. This field can contain any PostgreSQL supported charset name. 
Example values diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/types/service.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/types/service.py index 2d7f2fd45a97..0f1d16264801 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/types/service.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/types/service.py @@ -23,7 +23,7 @@ from google.rpc import status_pb2 # type: ignore import proto # type: ignore -from google.cloud.alloydb_v1alpha.types import resources +from google.cloud.alloydb_v1alpha.types import data_model, resources __protobuf__ = proto.module( package="google.cloud.alloydb.v1alpha", @@ -34,7 +34,10 @@ "CreateSecondaryClusterRequest", "CreateClusterRequest", "UpdateClusterRequest", + "UpgradeClusterRequest", + "UpgradeClusterResponse", "DeleteClusterRequest", + "SwitchoverClusterRequest", "PromoteClusterRequest", "RestoreClusterRequest", "ListInstancesRequest", @@ -52,6 +55,9 @@ "FailoverInstanceRequest", "InjectFaultRequest", "RestartInstanceRequest", + "ExecuteSqlRequest", + "ExecuteSqlResponse", + "ExecuteSqlMetadata", "ListBackupsRequest", "ListBackupsResponse", "GetBackupRequest", @@ -64,6 +70,8 @@ "GenerateClientCertificateResponse", "GetConnectionInfoRequest", "OperationMetadata", + "PromoteClusterStatus", + "UpgradeClusterStatus", "ListUsersRequest", "ListUsersResponse", "GetUserRequest", @@ -196,17 +204,17 @@ class CreateSecondaryClusterRequest(proto.Message): request_id (str): Optional. An optional request ID to identify requests. Specify a unique request ID so that if - you must retry your request, the server will - know to ignore the request if it has already - been completed. The server will guarantee that - for at least 60 minutes since the first request. + you must retry your request, the server ignores + the request if it has already been completed. + The server guarantees that for at least 60 + minutes since the first request. 
For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request - ID, the server can check if original operation - with the same request ID was received, and if - so, will ignore the second request. This + ID, the server can check if the original + operation with the same request ID was received, + and if so, ignores the second request. This prevents clients from accidentally creating duplicate commitments. @@ -214,10 +222,10 @@ class CreateSecondaryClusterRequest(proto.Message): exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). validate_only (bool): - Optional. If set, performs request validation - (e.g. permission checks and any other type of - validation), but do not actually execute the - create request. + Optional. If set, performs request + validation, for example, permission checks and + any other type of validation, but does not + actually execute the create request. """ parent: str = proto.Field( @@ -258,17 +266,17 @@ class CreateClusterRequest(proto.Message): request_id (str): Optional. An optional request ID to identify requests. Specify a unique request ID so that if - you must retry your request, the server will - know to ignore the request if it has already - been completed. The server will guarantee that - for at least 60 minutes since the first request. + you must retry your request, the server ignores + the request if it has already been completed. + The server guarantees that for at least 60 + minutes since the first request. For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request - ID, the server can check if original operation - with the same request ID was received, and if - so, will ignore the second request. 
This + ID, the server can check if the original + operation with the same request ID was received, + and if so, ignores the second request. This prevents clients from accidentally creating duplicate commitments. @@ -276,10 +284,10 @@ class CreateClusterRequest(proto.Message): exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). validate_only (bool): - Optional. If set, performs request validation - (e.g. permission checks and any other type of - validation), but do not actually execute the - create request. + Optional. If set, performs request + validation, for example, permission checks and + any other type of validation, but does not + actually execute the create request. """ parent: str = proto.Field( @@ -321,17 +329,17 @@ class UpdateClusterRequest(proto.Message): request_id (str): Optional. An optional request ID to identify requests. Specify a unique request ID so that if - you must retry your request, the server will - know to ignore the request if it has already - been completed. The server will guarantee that - for at least 60 minutes since the first request. + you must retry your request, the server ignores + the request if it has already been completed. + The server guarantees that for at least 60 + minutes since the first request. For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request - ID, the server can check if original operation - with the same request ID was received, and if - so, will ignore the second request. This + ID, the server can check if the original + operation with the same request ID was received, + and if so, ignores the second request. This prevents clients from accidentally creating duplicate commitments. @@ -339,10 +347,10 @@ class UpdateClusterRequest(proto.Message): exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). validate_only (bool): - Optional. 
If set, performs request validation - (e.g. permission checks and any other type of - validation), but do not actually execute the - update request. + Optional. If set, performs request + validation, for example, permission checks and + any other type of validation, but does not + actually execute the create request. allow_missing (bool): Optional. If set to true, update succeeds even if cluster is not found. In that case, a new cluster is created and @@ -373,6 +381,284 @@ class UpdateClusterRequest(proto.Message): ) +class UpgradeClusterRequest(proto.Message): + r"""Upgrades a cluster. + + Attributes: + name (str): + Required. The resource name of the cluster. + version (google.cloud.alloydb_v1alpha.types.DatabaseVersion): + Required. The version the cluster is going to + be upgraded to. + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server ignores + the request if it has already been completed. + The server guarantees that for at least 60 + minutes since the first request. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if the original + operation with the same request ID was received, + and if so, ignores the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + validate_only (bool): + Optional. If set, performs request + validation, for example, permission checks and + any other type of validation, but does not + actually execute the create request. + etag (str): + Optional. The current etag of the Cluster. + If an etag is provided and does not match the + current etag of the Cluster, upgrade will be + blocked and an ABORTED error will be returned. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + version: resources.DatabaseVersion = proto.Field( + proto.ENUM, + number=2, + enum=resources.DatabaseVersion, + ) + request_id: str = proto.Field( + proto.STRING, + number=3, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=4, + ) + etag: str = proto.Field( + proto.STRING, + number=5, + ) + + +class UpgradeClusterResponse(proto.Message): + r"""UpgradeClusterResponse contains the response for upgrade + cluster operation. + + Attributes: + status (google.cloud.alloydb_v1alpha.types.UpgradeClusterResponse.Status): + Status of upgrade operation. + message (str): + A user friendly message summarising the + upgrade operation details and the next steps for + the user if there is any. + cluster_upgrade_details (MutableSequence[google.cloud.alloydb_v1alpha.types.UpgradeClusterResponse.ClusterUpgradeDetails]): + Array of upgrade details for the current + cluster and all the secondary clusters + associated with this cluster. + """ + + class Status(proto.Enum): + r"""Status of upgrade operation. + + Values: + STATUS_UNSPECIFIED (0): + Unspecified status. + NOT_STARTED (4): + Not started. + IN_PROGRESS (5): + In progress. + SUCCESS (1): + Operation succeeded. + FAILED (2): + Operation failed. + PARTIAL_SUCCESS (3): + Operation partially succeeded. + CANCEL_IN_PROGRESS (6): + Cancel is in progress. + CANCELLED (7): + Cancellation complete. + """ + STATUS_UNSPECIFIED = 0 + NOT_STARTED = 4 + IN_PROGRESS = 5 + SUCCESS = 1 + FAILED = 2 + PARTIAL_SUCCESS = 3 + CANCEL_IN_PROGRESS = 6 + CANCELLED = 7 + + class Stage(proto.Enum): + r"""Stage in the upgrade. + + Values: + STAGE_UNSPECIFIED (0): + Unspecified stage. + ALLOYDB_PRECHECK (1): + Pre-upgrade custom checks, not covered by pg_upgrade. + PG_UPGRADE_CHECK (2): + Pre-upgrade pg_upgrade checks. + PREPARE_FOR_UPGRADE (5): + Clone the original cluster. + PRIMARY_INSTANCE_UPGRADE (3): + Upgrade the primary instance(downtime). 
+ READ_POOL_INSTANCES_UPGRADE (4): + This stage is read pool upgrade. + ROLLBACK (6): + Rollback in case of critical failures. + CLEANUP (7): + Cleanup. + """ + STAGE_UNSPECIFIED = 0 + ALLOYDB_PRECHECK = 1 + PG_UPGRADE_CHECK = 2 + PREPARE_FOR_UPGRADE = 5 + PRIMARY_INSTANCE_UPGRADE = 3 + READ_POOL_INSTANCES_UPGRADE = 4 + ROLLBACK = 6 + CLEANUP = 7 + + class StageInfo(proto.Message): + r"""Stage information for different stages in the upgrade + process. + + Attributes: + stage (google.cloud.alloydb_v1alpha.types.UpgradeClusterResponse.Stage): + The stage. + status (google.cloud.alloydb_v1alpha.types.UpgradeClusterResponse.Status): + Status of the stage. + logs_url (str): + logs_url is the URL for the logs associated with a stage if + that stage has logs. Right now, only three stages have logs: + ALLOYDB_PRECHECK, PG_UPGRADE_CHECK, + PRIMARY_INSTANCE_UPGRADE. + """ + + stage: "UpgradeClusterResponse.Stage" = proto.Field( + proto.ENUM, + number=1, + enum="UpgradeClusterResponse.Stage", + ) + status: "UpgradeClusterResponse.Status" = proto.Field( + proto.ENUM, + number=2, + enum="UpgradeClusterResponse.Status", + ) + logs_url: str = proto.Field( + proto.STRING, + number=3, + ) + + class InstanceUpgradeDetails(proto.Message): + r"""Details regarding the upgrade of instaces associated with a + cluster. + + Attributes: + name (str): + Normalized name of the instance. + upgrade_status (google.cloud.alloydb_v1alpha.types.UpgradeClusterResponse.Status): + Upgrade status of the instance. + instance_type (google.cloud.alloydb_v1alpha.types.Instance.InstanceType): + Instance type. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + upgrade_status: "UpgradeClusterResponse.Status" = proto.Field( + proto.ENUM, + number=2, + enum="UpgradeClusterResponse.Status", + ) + instance_type: resources.Instance.InstanceType = proto.Field( + proto.ENUM, + number=3, + enum=resources.Instance.InstanceType, + ) + + class ClusterUpgradeDetails(proto.Message): + r"""Upgrade details of a cluster. This cluster can be primary or + secondary. + + Attributes: + name (str): + Normalized name of the cluster + upgrade_status (google.cloud.alloydb_v1alpha.types.UpgradeClusterResponse.Status): + Upgrade status of the cluster. + cluster_type (google.cloud.alloydb_v1alpha.types.Cluster.ClusterType): + Cluster type which can either be primary or + secondary. + database_version (google.cloud.alloydb_v1alpha.types.DatabaseVersion): + Database version of the cluster after the + upgrade operation. This will be the target + version if the upgrade was successful otherwise + it remains the same as that before the upgrade + operation. + stage_info (MutableSequence[google.cloud.alloydb_v1alpha.types.UpgradeClusterResponse.StageInfo]): + Array containing stage info associated with + this cluster. + instance_upgrade_details (MutableSequence[google.cloud.alloydb_v1alpha.types.UpgradeClusterResponse.InstanceUpgradeDetails]): + Upgrade details of the instances directly + associated with this cluster. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + upgrade_status: "UpgradeClusterResponse.Status" = proto.Field( + proto.ENUM, + number=2, + enum="UpgradeClusterResponse.Status", + ) + cluster_type: resources.Cluster.ClusterType = proto.Field( + proto.ENUM, + number=3, + enum=resources.Cluster.ClusterType, + ) + database_version: resources.DatabaseVersion = proto.Field( + proto.ENUM, + number=4, + enum=resources.DatabaseVersion, + ) + stage_info: MutableSequence[ + "UpgradeClusterResponse.StageInfo" + ] = proto.RepeatedField( + proto.MESSAGE, + number=5, + message="UpgradeClusterResponse.StageInfo", + ) + instance_upgrade_details: MutableSequence[ + "UpgradeClusterResponse.InstanceUpgradeDetails" + ] = proto.RepeatedField( + proto.MESSAGE, + number=6, + message="UpgradeClusterResponse.InstanceUpgradeDetails", + ) + + status: Status = proto.Field( + proto.ENUM, + number=1, + enum=Status, + ) + message: str = proto.Field( + proto.STRING, + number=2, + ) + cluster_upgrade_details: MutableSequence[ + ClusterUpgradeDetails + ] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message=ClusterUpgradeDetails, + ) + + class DeleteClusterRequest(proto.Message): r"""Message for deleting a Cluster @@ -384,17 +670,17 @@ class DeleteClusterRequest(proto.Message): request_id (str): Optional. An optional request ID to identify requests. Specify a unique request ID so that if - you must retry your request, the server will - know to ignore the request if it has already - been completed. The server will guarantee that - for at least 60 minutes after the first request. + you must retry your request, the server ignores + the request if it has already been completed. + The server guarantees that for at least 60 + minutes since the first request. For example, consider a situation where you make an initial request and the request times out. 
If you make the request again with the same request - ID, the server can check if original operation - with the same request ID was received, and if - so, will ignore the second request. This + ID, the server can check if the original + operation with the same request ID was received, + and if so, ignores the second request. This prevents clients from accidentally creating duplicate commitments. @@ -407,10 +693,10 @@ class DeleteClusterRequest(proto.Message): current etag of the Cluster, deletion will be blocked and an ABORTED error will be returned. validate_only (bool): - Optional. If set, performs request validation - (e.g. permission checks and any other type of - validation), but do not actually execute the - delete. + Optional. If set, performs request + validation, for example, permission checks and + any other type of validation, but does not + actually execute the create request. force (bool): Optional. Whether to cascade delete child instances for given cluster. @@ -438,6 +724,55 @@ class DeleteClusterRequest(proto.Message): ) +class SwitchoverClusterRequest(proto.Message): + r"""Message for switching over to a cluster + + Attributes: + name (str): + Required. The name of the resource. For the + required format, see the comment on the + Cluster.name field + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server ignores + the request if it has already been completed. + The server guarantees that for at least 60 + minutes since the first request. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if the original + operation with the same request ID was received, + and if so, ignores the second request. This + prevents clients from accidentally creating + duplicate commitments. 
+ + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + validate_only (bool): + Optional. If set, performs request + validation, for example, permission checks and + any other type of validation, but does not + actually execute the create request. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + request_id: str = proto.Field( + proto.STRING, + number=2, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=3, + ) + + class PromoteClusterRequest(proto.Message): r"""Message for promoting a Cluster @@ -449,10 +784,10 @@ class PromoteClusterRequest(proto.Message): request_id (str): Optional. An optional request ID to identify requests. Specify a unique request ID so that if - you must retry your request, the server will - know to ignore the request if it has already - been completed. The server will guarantee that - for at least 60 minutes after the first request. + you must retry your request, the server ignores + the request if it has already been completed. + The server guarantees that for at least 60 + minutes since the first request. For example, consider a situation where you make an initial request and the request times out. If @@ -472,10 +807,10 @@ class PromoteClusterRequest(proto.Message): current etag of the Cluster, deletion will be blocked and an ABORTED error will be returned. validate_only (bool): - Optional. If set, performs request validation - (e.g. permission checks and any other type of - validation), but do not actually execute the - delete. + Optional. If set, performs request + validation, for example, permission checks and + any other type of validation, but does not + actually execute the create request. """ name: str = proto.Field( @@ -529,17 +864,17 @@ class RestoreClusterRequest(proto.Message): request_id (str): Optional. An optional request ID to identify requests. 
Specify a unique request ID so that if - you must retry your request, the server will - know to ignore the request if it has already - been completed. The server will guarantee that - for at least 60 minutes since the first request. + you must retry your request, the server ignores + the request if it has already been completed. + The server guarantees that for at least 60 + minutes since the first request. For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request - ID, the server can check if original operation - with the same request ID was received, and if - so, will ignore the second request. This + ID, the server can check if the original + operation with the same request ID was received, + and if so, ignores the second request. This prevents clients from accidentally creating duplicate commitments. @@ -547,10 +882,10 @@ class RestoreClusterRequest(proto.Message): exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). validate_only (bool): - Optional. If set, performs request validation - (e.g. permission checks and any other type of - validation), but do not actually execute the - import request. + Optional. If set, performs request + validation, for example, permission checks and + any other type of validation, but does not + actually execute the create request. """ backup_source: resources.BackupSource = proto.Field( @@ -706,17 +1041,17 @@ class CreateInstanceRequest(proto.Message): request_id (str): Optional. An optional request ID to identify requests. Specify a unique request ID so that if - you must retry your request, the server will - know to ignore the request if it has already - been completed. The server will guarantee that - for at least 60 minutes since the first request. + you must retry your request, the server ignores + the request if it has already been completed. 
+ The server guarantees that for at least 60 + minutes since the first request. For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request - ID, the server can check if original operation - with the same request ID was received, and if - so, will ignore the second request. This + ID, the server can check if the original + operation with the same request ID was received, + and if so, ignores the second request. This prevents clients from accidentally creating duplicate commitments. @@ -724,10 +1059,10 @@ class CreateInstanceRequest(proto.Message): exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). validate_only (bool): - Optional. If set, performs request validation - (e.g. permission checks and any other type of - validation), but do not actually execute the - create request. + Optional. If set, performs request + validation, for example, permission checks and + any other type of validation, but does not + actually execute the create request. """ parent: str = proto.Field( @@ -768,17 +1103,17 @@ class CreateSecondaryInstanceRequest(proto.Message): request_id (str): Optional. An optional request ID to identify requests. Specify a unique request ID so that if - you must retry your request, the server will - know to ignore the request if it has already - been completed. The server will guarantee that - for at least 60 minutes since the first request. + you must retry your request, the server ignores + the request if it has already been completed. + The server guarantees that for at least 60 + minutes since the first request. For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request - ID, the server can check if original operation - with the same request ID was received, and if - so, will ignore the second request. 
This + ID, the server can check if the original + operation with the same request ID was received, + and if so, ignores the second request. This prevents clients from accidentally creating duplicate commitments. @@ -786,10 +1121,10 @@ class CreateSecondaryInstanceRequest(proto.Message): exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). validate_only (bool): - Optional. If set, performs request validation - (e.g. permission checks and any other type of - validation), but do not actually execute the - create request. + Optional. If set, performs request + validation, for example, permission checks and + any other type of validation, but does not + actually execute the create request. """ parent: str = proto.Field( @@ -845,17 +1180,17 @@ class BatchCreateInstancesRequest(proto.Message): request_id (str): Optional. An optional request ID to identify requests. Specify a unique request ID so that if - you must retry your request, the server will - know to ignore the request if it has already - been completed. The server will guarantee that - for at least 60 minutes since the first request. + you must retry your request, the server ignores + the request if it has already been completed. + The server guarantees that for at least 60 + minutes since the first request. For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request - ID, the server can check if original operation - with the same request ID was received, and if - so, will ignore the second request. This + ID, the server can check if the original + operation with the same request ID was received, + and if so, ignores the second request. This prevents clients from accidentally creating duplicate commitments. @@ -983,7 +1318,7 @@ class State(proto.Enum): The state of the instance is unknown. 
PENDING_CREATE (1): Instance is pending creation and has not yet - been picked up for processsing in the backend. + been picked up for processing in the backend. READY (2): The instance is active and running. CREATING (3): @@ -1044,17 +1379,17 @@ class UpdateInstanceRequest(proto.Message): request_id (str): Optional. An optional request ID to identify requests. Specify a unique request ID so that if - you must retry your request, the server will - know to ignore the request if it has already - been completed. The server will guarantee that - for at least 60 minutes since the first request. + you must retry your request, the server ignores + the request if it has already been completed. + The server guarantees that for at least 60 + minutes since the first request. For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request - ID, the server can check if original operation - with the same request ID was received, and if - so, will ignore the second request. This + ID, the server can check if the original + operation with the same request ID was received, + and if so, ignores the second request. This prevents clients from accidentally creating duplicate commitments. @@ -1062,10 +1397,10 @@ class UpdateInstanceRequest(proto.Message): exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). validate_only (bool): - Optional. If set, performs request validation - (e.g. permission checks and any other type of - validation), but do not actually execute the - update request. + Optional. If set, performs request + validation, for example, permission checks and + any other type of validation, but does not + actually execute the create request. allow_missing (bool): Optional. If set to true, update succeeds even if instance is not found. 
In that case, a new instance is created and @@ -1107,17 +1442,17 @@ class DeleteInstanceRequest(proto.Message): request_id (str): Optional. An optional request ID to identify requests. Specify a unique request ID so that if - you must retry your request, the server will - know to ignore the request if it has already - been completed. The server will guarantee that - for at least 60 minutes after the first request. + you must retry your request, the server ignores + the request if it has already been completed. + The server guarantees that for at least 60 + minutes since the first request. For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request - ID, the server can check if original operation - with the same request ID was received, and if - so, will ignore the second request. This + ID, the server can check if the original + operation with the same request ID was received, + and if so, ignores the second request. This prevents clients from accidentally creating duplicate commitments. @@ -1130,10 +1465,10 @@ class DeleteInstanceRequest(proto.Message): current etag of the Instance, deletion will be blocked and an ABORTED error will be returned. validate_only (bool): - Optional. If set, performs request validation - (e.g. permission checks and any other type of - validation), but do not actually execute the - delete. + Optional. If set, performs request + validation, for example, permission checks and + any other type of validation, but does not + actually execute the delete. """ name: str = proto.Field( @@ -1165,17 +1500,17 @@ class FailoverInstanceRequest(proto.Message): request_id (str): Optional. An optional request ID to identify requests. Specify a unique request ID so that if - you must retry your request, the server will - know to ignore the request if it has already - been completed.
The server will guarantee that - for at least 60 minutes after the first request. + you must retry your request, the server ignores + the request if it has already been completed. + The server guarantees that for at least 60 + minutes since the first request. For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request - ID, the server can check if original operation - with the same request ID was received, and if - so, will ignore the second request. This + ID, the server can check if the original + operation with the same request ID was received, + and if so, ignores the second request. This prevents clients from accidentally creating duplicate commitments. @@ -1183,10 +1518,10 @@ class FailoverInstanceRequest(proto.Message): exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). validate_only (bool): - Optional. If set, performs request validation - (e.g. permission checks and any other type of - validation), but do not actually execute the - failover. + Optional. If set, performs request + validation, for example, permission checks and + any other type of validation, but does not + actually execute the failover. """ name: str = proto.Field( @@ -1217,17 +1552,17 @@ class InjectFaultRequest(proto.Message): request_id (str): Optional. An optional request ID to identify requests. Specify a unique request ID so that if - you must retry your request, the server will - know to ignore the request if it has already - been completed. The server will guarantee that - for at least 60 minutes after the first request. + you must retry your request, the server ignores + the request if it has already been completed. + The server guarantees that for at least 60 + minutes since the first request. For example, consider a situation where you make an initial request and the request times out.
If you make the request again with the same request - ID, the server can check if original operation - with the same request ID was received, and if - so, will ignore the second request. This + ID, the server can check if the original + operation with the same request ID was received, + and if so, ignores the second request. This prevents clients from accidentally creating duplicate commitments. @@ -1235,10 +1570,10 @@ class InjectFaultRequest(proto.Message): exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). validate_only (bool): - Optional. If set, performs request validation - (e.g. permission checks and any other type of - validation), but do not actually execute the - fault injection. + Optional. If set, performs request + validation, for example, permission checks and + any other type of validation, but does not + actually execute the fault injection. """ class FaultType(proto.Enum): @@ -1284,17 +1619,17 @@ class RestartInstanceRequest(proto.Message): request_id (str): Optional. An optional request ID to identify requests. Specify a unique request ID so that if - you must retry your request, the server will - know to ignore the request if it has already - been completed. The server will guarantee that - for at least 60 minutes after the first request. + you must retry your request, the server ignores + the request if it has already been completed. + The server guarantees that for at least 60 + minutes since the first request. For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request - ID, the server can check if original operation - with the same request ID was received, and if - so, will ignore the second request. This + ID, the server can check if the original + operation with the same request ID was received, + and if so, ignores the second request. This prevents clients from accidentally creating duplicate commitments.
@@ -1302,10 +1637,14 @@ class RestartInstanceRequest(proto.Message): exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). validate_only (bool): - Optional. If set, performs request validation - (e.g. permission checks and any other type of - validation), but do not actually execute the - restart. + Optional. If set, performs request + validation, for example, permission checks and + any other type of validation, but does not + actually execute the restart. + node_ids (MutableSequence[str]): + Optional. Full name of the nodes as obtained from + INSTANCE_VIEW_FULL to restart upon. Applicable only to read + instances. """ name: str = proto.Field( @@ -1320,6 +1659,162 @@ class RestartInstanceRequest(proto.Message): proto.BOOL, number=3, ) + node_ids: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=4, + ) + + +class ExecuteSqlRequest(proto.Message): + r"""Request for ExecuteSql rpc. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + password (str): + Optional. The database native user’s + password. + + This field is a member of `oneof`_ ``user_credential``. + instance (str): + Required. The instance where the SQL will be + executed. For the required format, see the + comment on the Instance.name field. + database (str): + Required. Name of the database where the query will be + executed. Note - Value provided should be the same as + expected from ``SELECT current_database();`` and NOT as a + resource reference. + user (str): + Required. Database user to be used for executing the SQL. + Note - Value provided should be the same as expected from + ``SELECT current_user;`` and NOT as a resource reference. + sql_statement (str): + Required. SQL statement to execute on + database. Any valid statement is permitted, + including DDL, DML, DQL statements.
+ """ + + password: str = proto.Field( + proto.STRING, + number=5, + oneof="user_credential", + ) + instance: str = proto.Field( + proto.STRING, + number=1, + ) + database: str = proto.Field( + proto.STRING, + number=2, + ) + user: str = proto.Field( + proto.STRING, + number=3, + ) + sql_statement: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ExecuteSqlResponse(proto.Message): + r"""Execute a SQL statement response. + + Attributes: + sql_results (MutableSequence[google.cloud.alloydb_v1alpha.types.SqlResult]): + SqlResult represents the results for the + execution of sql statements. + metadata (google.cloud.alloydb_v1alpha.types.ExecuteSqlMetadata): + Any additional metadata information regarding + the execution of the sql statement. + """ + + sql_results: MutableSequence[data_model.SqlResult] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=data_model.SqlResult, + ) + metadata: "ExecuteSqlMetadata" = proto.Field( + proto.MESSAGE, + number=3, + message="ExecuteSqlMetadata", + ) + + +class ExecuteSqlMetadata(proto.Message): + r"""Any additional metadata information regarding the execution + of the SQL + + Attributes: + message (str): + Message related to SQL execution. Marked as + core content since it can potentially contain + details related to the query or result set. This + field can be used to convey messages such as + "when the SQL result set exceeds the acceptable + response size limits.". + partial_result (bool): + Set to true if SQL returned a result set + larger than the acceptable response size limits + and the result was truncated. + sql_statement_execution_duration (google.protobuf.duration_pb2.Duration): + The time duration taken to execute the sql + statement. + status (google.cloud.alloydb_v1alpha.types.ExecuteSqlMetadata.Status): + Status of SQL execution. + """ + + class Status(proto.Enum): + r"""Status contains all valid Status a SQL execution can end up + in. 
+ + Values: + STATUS_UNSPECIFIED (0): + The status is unknown. + OK (1): + No error during SQL execution i.e. All SQL + statements ran to completion. The "message" will + be empty. + PARTIAL (2): + Same as OK, except indicates that only + partial results were returned. The "message" + field will contain details on why results were + truncated. + ERROR (3): + Error during SQL execution. At least 1 SQL + statement execution resulted in an error. Side + effects of other statements are rolled back. + The "message" field will contain human readable + error given by Postgres of the first bad SQL + statement. SQL execution errors don't constitute + API errors as defined in + https://google.aip.dev/193 but will be returned + as part of this message. + """ + STATUS_UNSPECIFIED = 0 + OK = 1 + PARTIAL = 2 + ERROR = 3 + + message: str = proto.Field( + proto.STRING, + number=1, + ) + partial_result: bool = proto.Field( + proto.BOOL, + number=2, + ) + sql_statement_execution_duration: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=3, + message=duration_pb2.Duration, + ) + status: Status = proto.Field( + proto.ENUM, + number=4, + enum=Status, + ) class ListBackupsRequest(proto.Message): @@ -1422,17 +1917,17 @@ class CreateBackupRequest(proto.Message): request_id (str): Optional. An optional request ID to identify requests. Specify a unique request ID so that if - you must retry your request, the server will - know to ignore the request if it has already - been completed. The server will guarantee that - for at least 60 minutes since the first request. + you must retry your request, the server ignores + the request if it has already been completed. + The server guarantees that for at least 60 + minutes since the first request. For example, consider a situation where you make an initial request and the request times out.
If you make the request again with the same request - ID, the server can check if original operation - with the same request ID was received, and if - so, will ignore the second request. This + ID, the server can check if the original + operation with the same request ID was received, + and if so, ignores the second request. This prevents clients from accidentally creating duplicate commitments. @@ -1483,17 +1978,17 @@ class UpdateBackupRequest(proto.Message): request_id (str): Optional. An optional request ID to identify requests. Specify a unique request ID so that if - you must retry your request, the server will - know to ignore the request if it has already - been completed. The server will guarantee that - for at least 60 minutes since the first request. + you must retry your request, the server ignores + the request if it has already been completed. + The server guarantees that for at least 60 + minutes since the first request. For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request - ID, the server can check if original operation - with the same request ID was received, and if - so, will ignore the second request. This + ID, the server can check if the original + operation with the same request ID was received, + and if so, ignores the second request. This prevents clients from accidentally creating duplicate commitments. @@ -1544,17 +2039,17 @@ class DeleteBackupRequest(proto.Message): request_id (str): Optional. An optional request ID to identify requests. Specify a unique request ID so that if - you must retry your request, the server will - know to ignore the request if it has already - been completed. The server will guarantee that - for at least 60 minutes after the first request. + you must retry your request, the server ignores + the request if it has already been completed. + The server guarantees that for at least 60 + minutes since the first request. 
For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request - ID, the server can check if original operation - with the same request ID was received, and if - so, will ignore the second request. This + ID, the server can check if the original + operation with the same request ID was received, + and if so, ignores the second request. This prevents clients from accidentally creating duplicate commitments. @@ -1668,17 +2163,17 @@ class GenerateClientCertificateRequest(proto.Message): request_id (str): Optional. An optional request ID to identify requests. Specify a unique request ID so that if - you must retry your request, the server will - know to ignore the request if it has already - been completed. The server will guarantee that - for at least 60 minutes after the first request. + you must retry your request, the server ignores + the request if it has already been completed. + The server guarantees that for at least 60 + minutes since the first request. For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request - ID, the server can check if original operation - with the same request ID was received, and if - so, will ignore the second request. This + ID, the server can check if the original + operation with the same request ID was received, + and if so, ignores the second request. This prevents clients from accidentally creating duplicate commitments. @@ -1774,17 +2269,17 @@ class GetConnectionInfoRequest(proto.Message): request_id (str): Optional. An optional request ID to identify requests. Specify a unique request ID so that if - you must retry your request, the server will - know to ignore the request if it has already - been completed. The server will guarantee that - for at least 60 minutes after the first request. 
+ you must retry your request, the server ignores + the request if it has already been completed. + The server guarantees that for at least 60 + minutes since the first request. For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request - ID, the server can check if original operation - with the same request ID was received, and if - so, will ignore the second request. This + ID, the server can check if the original + operation with the same request ID was received, + and if so, ignores the second request. This prevents clients from accidentally creating duplicate commitments. @@ -1806,6 +2301,11 @@ class GetConnectionInfoRequest(proto.Message): class OperationMetadata(proto.Message): r"""Represents the metadata of the long-running operation. + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields Attributes: @@ -1813,6 +2313,16 @@ class OperationMetadata(proto.Message): Output only. BatchCreateInstances related metadata. + This field is a member of `oneof`_ ``request_specific``. + promote_cluster_status (google.cloud.alloydb_v1alpha.types.PromoteClusterStatus): + Output only. PromoteClusterStatus related + metadata. + + This field is a member of `oneof`_ ``request_specific``. + upgrade_cluster_status (google.cloud.alloydb_v1alpha.types.UpgradeClusterStatus): + Output only. UpgradeClusterStatus related + metadata. + This field is a member of `oneof`_ ``request_specific``. create_time (google.protobuf.timestamp_pb2.Timestamp): Output only. 
The time the operation was @@ -1846,6 +2356,18 @@ class OperationMetadata(proto.Message): oneof="request_specific", message="BatchCreateInstancesMetadata", ) + promote_cluster_status: "PromoteClusterStatus" = proto.Field( + proto.MESSAGE, + number=9, + oneof="request_specific", + message="PromoteClusterStatus", + ) + upgrade_cluster_status: "UpgradeClusterStatus" = proto.Field( + proto.MESSAGE, + number=10, + oneof="request_specific", + message="UpgradeClusterStatus", + ) create_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=1, @@ -1878,6 +2400,178 @@ class OperationMetadata(proto.Message): ) +class PromoteClusterStatus(proto.Message): + r"""Message for current status of the database during Promote + Cluster operation. + + Attributes: + state (google.cloud.alloydb_v1alpha.types.PromoteClusterStatus.State): + Output only. The current state of the promoted secondary's + database. Once the operation is complete, the final state of + the database in the LRO can be one of: + + 1. PROMOTE_CLUSTER_AVAILABLE_FOR_READ, indicating that the + promote operation has failed mid-way, the database is + still only available for read. + 2. PROMOTE_CLUSTER_COMPLETED, indicating that a promote + operation completed successfully. The database is + available for both read and write requests + """ + + class State(proto.Enum): + r"""State contains all valid states of the database during + promote cluster operation. This is used for status reporting + through the LRO metadata. + + Values: + STATE_UNSPECIFIED (0): + The state of the database is unknown. + PROMOTE_CLUSTER_AVAILABLE_FOR_READ (1): + The database is only available for read. + PROMOTE_CLUSTER_AVAILABLE_FOR_WRITE (2): + The database is available for both read and + write. The promote operation is near completion. + PROMOTE_CLUSTER_COMPLETED (3): + The promote operation is completed and the + database is available for write. 
+ """ + STATE_UNSPECIFIED = 0 + PROMOTE_CLUSTER_AVAILABLE_FOR_READ = 1 + PROMOTE_CLUSTER_AVAILABLE_FOR_WRITE = 2 + PROMOTE_CLUSTER_COMPLETED = 3 + + state: State = proto.Field( + proto.ENUM, + number=1, + enum=State, + ) + + +class UpgradeClusterStatus(proto.Message): + r"""Message for current status of the Major Version Upgrade + operation. + + Attributes: + state (google.cloud.alloydb_v1alpha.types.UpgradeClusterResponse.Status): + Cluster Major Version Upgrade state. + cancellable (bool): + Whether the operation is cancellable. + source_version (google.cloud.alloydb_v1alpha.types.DatabaseVersion): + Source database major version. + target_version (google.cloud.alloydb_v1alpha.types.DatabaseVersion): + Target database major version. + stages (MutableSequence[google.cloud.alloydb_v1alpha.types.UpgradeClusterStatus.StageStatus]): + Status of all upgrade stages. + """ + + class StageStatus(proto.Message): + r"""Status of an upgrade stage. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + read_pool_instances_upgrade (google.cloud.alloydb_v1alpha.types.UpgradeClusterStatus.ReadPoolInstancesUpgradeStageStatus): + Read pool instances upgrade metadata. + + This field is a member of `oneof`_ ``stage_specific_status``. + stage (google.cloud.alloydb_v1alpha.types.UpgradeClusterResponse.Stage): + Upgrade stage. + state (google.cloud.alloydb_v1alpha.types.UpgradeClusterResponse.Status): + State of this stage. 
+ """ + + read_pool_instances_upgrade: "UpgradeClusterStatus.ReadPoolInstancesUpgradeStageStatus" = proto.Field( + proto.MESSAGE, + number=11, + oneof="stage_specific_status", + message="UpgradeClusterStatus.ReadPoolInstancesUpgradeStageStatus", + ) + stage: "UpgradeClusterResponse.Stage" = proto.Field( + proto.ENUM, + number=1, + enum="UpgradeClusterResponse.Stage", + ) + state: "UpgradeClusterResponse.Status" = proto.Field( + proto.ENUM, + number=2, + enum="UpgradeClusterResponse.Status", + ) + + class ReadPoolInstancesUpgradeStageStatus(proto.Message): + r"""Read pool instances upgrade specific status. + + Attributes: + upgrade_stats (google.cloud.alloydb_v1alpha.types.UpgradeClusterStatus.ReadPoolInstancesUpgradeStageStatus.Stats): + Read pool instances upgrade statistics. + """ + + class Stats(proto.Message): + r"""Upgrade stats for read pool instances. + + Attributes: + not_started (int): + Number of read pool instances for which + upgrade has not started. + ongoing (int): + Number of read pool instances undergoing + upgrade. + success (int): + Number of read pool instances successfully + upgraded. + failed (int): + Number of read pool instances which failed to + upgrade. 
+ """ + + not_started: int = proto.Field( + proto.INT32, + number=1, + ) + ongoing: int = proto.Field( + proto.INT32, + number=2, + ) + success: int = proto.Field( + proto.INT32, + number=3, + ) + failed: int = proto.Field( + proto.INT32, + number=4, + ) + + upgrade_stats: "UpgradeClusterStatus.ReadPoolInstancesUpgradeStageStatus.Stats" = proto.Field( + proto.MESSAGE, + number=1, + message="UpgradeClusterStatus.ReadPoolInstancesUpgradeStageStatus.Stats", + ) + + state: "UpgradeClusterResponse.Status" = proto.Field( + proto.ENUM, + number=1, + enum="UpgradeClusterResponse.Status", + ) + cancellable: bool = proto.Field( + proto.BOOL, + number=2, + ) + source_version: resources.DatabaseVersion = proto.Field( + proto.ENUM, + number=3, + enum=resources.DatabaseVersion, + ) + target_version: resources.DatabaseVersion = proto.Field( + proto.ENUM, + number=4, + enum=resources.DatabaseVersion, + ) + stages: MutableSequence[StageStatus] = proto.RepeatedField( + proto.MESSAGE, + number=5, + message=StageStatus, + ) + + class ListUsersRequest(proto.Message): r"""Message for requesting list of Users @@ -1981,17 +2675,17 @@ class CreateUserRequest(proto.Message): request_id (str): Optional. An optional request ID to identify requests. Specify a unique request ID so that if - you must retry your request, the server will - know to ignore the request if it has already - been completed. The server will guarantee that - for at least 60 minutes since the first request. + you must retry your request, the server ignores + the request if it has already been completed. + The server guarantees that for at least 60 + minutes since the first request. For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request - ID, the server can check if original operation - with the same request ID was received, and if - so, will ignore the second request. 
This + ID, the server can check if the original + operation with the same request ID was received, + and if so, ignores the second request. This prevents clients from accidentally creating duplicate commitments. @@ -2042,17 +2736,17 @@ class UpdateUserRequest(proto.Message): request_id (str): Optional. An optional request ID to identify requests. Specify a unique request ID so that if - you must retry your request, the server will - know to ignore the request if it has already - been completed. The server will guarantee that - for at least 60 minutes since the first request. + you must retry your request, the server ignores + the request if it has already been completed. + The server guarantees that for at least 60 + minutes since the first request. For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request - ID, the server can check if original operation - with the same request ID was received, and if - so, will ignore the second request. This + ID, the server can check if the original + operation with the same request ID was received, + and if so, ignores the second request. This prevents clients from accidentally creating duplicate commitments. @@ -2102,17 +2796,17 @@ class DeleteUserRequest(proto.Message): request_id (str): Optional. An optional request ID to identify requests. Specify a unique request ID so that if - you must retry your request, the server will - know to ignore the request if it has already - been completed. The server will guarantee that - for at least 60 minutes after the first request. + you must retry your request, the server ignores + the request if it has already been completed. + The server guarantees that for at least 60 + minutes since the first request. For example, consider a situation where you make an initial request and the request times out. 
If you make the request again with the same request - ID, the server can check if original operation - with the same request ID was received, and if - so, will ignore the second request. This + ID, the server can check if the original + operation with the same request ID was received, + and if so, ignores the second request. This prevents clients from accidentally creating duplicate commitments. @@ -2146,12 +2840,10 @@ class ListDatabasesRequest(proto.Message): Required. Parent value for ListDatabasesRequest. page_size (int): - Optional. The maximum number of databases to - return. The service may return fewer than this - value. If unspecified, an appropriate number of - databases will be returned. The max value will - be 2000, values above max will be coerced to - max. + Optional. The maximum number of databases to return. The + service may return fewer than this value. If unspecified, + 2000 is the default page_size. The max value of page_size + will be 4000, values above max will be coerced to max. page_token (str): Optional. A page token, received from a previous ``ListDatabases`` call. 
This should be provided to retrieve diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/__init__.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/__init__.py index 209ee9e5b9ce..f6ea1b6aeffb 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/__init__.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/__init__.py @@ -19,6 +19,9 @@ from .services.alloy_db_admin import AlloyDBAdminAsyncClient, AlloyDBAdminClient +from .types.csql_resources import CloudSQLBackupRunSource +from .types.data_model import SqlResult, SqlResultColumn, SqlResultRow, SqlResultValue +from .types.gemini import GeminiClusterConfig, GeminiInstanceConfig from .types.resources import ( AutomatedBackupPolicy, Backup, @@ -39,6 +42,7 @@ MaintenanceUpdatePolicy, MigrationSource, SslConfig, + SubscriptionType, SupportedDatabaseFlag, User, UserPassword, @@ -59,6 +63,9 @@ DeleteClusterRequest, DeleteInstanceRequest, DeleteUserRequest, + ExecuteSqlMetadata, + ExecuteSqlRequest, + ExecuteSqlResponse, FailoverInstanceRequest, GenerateClientCertificateRequest, GenerateClientCertificateResponse, @@ -82,12 +89,17 @@ ListUsersResponse, OperationMetadata, PromoteClusterRequest, + PromoteClusterStatus, RestartInstanceRequest, RestoreClusterRequest, + SwitchoverClusterRequest, UpdateBackupRequest, UpdateClusterRequest, UpdateInstanceRequest, UpdateUserRequest, + UpgradeClusterRequest, + UpgradeClusterResponse, + UpgradeClusterStatus, ) __all__ = ( @@ -100,6 +112,7 @@ "BatchCreateInstancesMetadata", "BatchCreateInstancesRequest", "BatchCreateInstancesResponse", + "CloudSQLBackupRunSource", "Cluster", "ClusterView", "ConnectionInfo", @@ -121,7 +134,12 @@ "DeleteUserRequest", "EncryptionConfig", "EncryptionInfo", + "ExecuteSqlMetadata", + "ExecuteSqlRequest", + "ExecuteSqlResponse", "FailoverInstanceRequest", + "GeminiClusterConfig", + "GeminiInstanceConfig", "GenerateClientCertificateRequest", "GenerateClientCertificateResponse", "GetBackupRequest", 
@@ -149,14 +167,24 @@ "MigrationSource", "OperationMetadata", "PromoteClusterRequest", + "PromoteClusterStatus", "RestartInstanceRequest", "RestoreClusterRequest", + "SqlResult", + "SqlResultColumn", + "SqlResultRow", + "SqlResultValue", "SslConfig", + "SubscriptionType", "SupportedDatabaseFlag", + "SwitchoverClusterRequest", "UpdateBackupRequest", "UpdateClusterRequest", "UpdateInstanceRequest", "UpdateUserRequest", + "UpgradeClusterRequest", + "UpgradeClusterResponse", + "UpgradeClusterStatus", "User", "UserPassword", ) diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/gapic_metadata.json b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/gapic_metadata.json index 8fcd1982baa7..4f5e11f08247 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/gapic_metadata.json +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/gapic_metadata.json @@ -65,6 +65,11 @@ "delete_user" ] }, + "ExecuteSql": { + "methods": [ + "execute_sql" + ] + }, "FailoverInstance": { "methods": [ "failover_instance" @@ -150,6 +155,11 @@ "restore_cluster" ] }, + "SwitchoverCluster": { + "methods": [ + "switchover_cluster" + ] + }, "UpdateBackup": { "methods": [ "update_backup" @@ -169,6 +179,11 @@ "methods": [ "update_user" ] + }, + "UpgradeCluster": { + "methods": [ + "upgrade_cluster" + ] } } }, @@ -230,6 +245,11 @@ "delete_user" ] }, + "ExecuteSql": { + "methods": [ + "execute_sql" + ] + }, "FailoverInstance": { "methods": [ "failover_instance" @@ -315,6 +335,11 @@ "restore_cluster" ] }, + "SwitchoverCluster": { + "methods": [ + "switchover_cluster" + ] + }, "UpdateBackup": { "methods": [ "update_backup" @@ -334,6 +359,11 @@ "methods": [ "update_user" ] + }, + "UpgradeCluster": { + "methods": [ + "upgrade_cluster" + ] } } }, @@ -395,6 +425,11 @@ "delete_user" ] }, + "ExecuteSql": { + "methods": [ + "execute_sql" + ] + }, "FailoverInstance": { "methods": [ "failover_instance" @@ -480,6 +515,11 @@ "restore_cluster" ] }, + 
"SwitchoverCluster": { + "methods": [ + "switchover_cluster" + ] + }, "UpdateBackup": { "methods": [ "update_backup" @@ -499,6 +539,11 @@ "methods": [ "update_user" ] + }, + "UpgradeCluster": { + "methods": [ + "upgrade_cluster" + ] } } } diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/gapic_version.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/gapic_version.py index 7d28791e7569..386ddb96d97f 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/gapic_version.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.3.15" # {x-release-please-version} +__version__ = "0.4.0" # {x-release-please-version} diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/async_client.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/async_client.py index 457a44a62114..dccd95717818 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/async_client.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/async_client.py @@ -53,7 +53,13 @@ from google.protobuf import timestamp_pb2 # type: ignore from google.cloud.alloydb_v1beta.services.alloy_db_admin import pagers -from google.cloud.alloydb_v1beta.types import resources, service +from google.cloud.alloydb_v1beta.types import ( + csql_resources, + data_model, + gemini, + resources, + service, +) from .client import AlloyDBAdminClient from .transports.base import DEFAULT_CLIENT_INFO, AlloyDBAdminTransport @@ -806,6 +812,139 @@ async def sample_update_cluster(): # Done; return the response. 
return response + async def upgrade_cluster( + self, + request: Optional[Union[service.UpgradeClusterRequest, dict]] = None, + *, + name: Optional[str] = None, + version: Optional[resources.DatabaseVersion] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Upgrades a single Cluster. + Imperative only. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1beta + + async def sample_upgrade_cluster(): + # Create a client + client = alloydb_v1beta.AlloyDBAdminAsyncClient() + + # Initialize request argument(s) + request = alloydb_v1beta.UpgradeClusterRequest( + name="name_value", + version="POSTGRES_16", + ) + + # Make the request + operation = client.upgrade_cluster(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.alloydb_v1beta.types.UpgradeClusterRequest, dict]]): + The request object. Upgrades a cluster. + name (:class:`str`): + Required. The resource name of the + cluster. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + version (:class:`google.cloud.alloydb_v1beta.types.DatabaseVersion`): + Required. The version the cluster is + going to be upgraded to. 
+ + This corresponds to the ``version`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.alloydb_v1beta.types.UpgradeClusterResponse` + UpgradeClusterResponse contains the response for upgrade + cluster operation. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, version]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.UpgradeClusterRequest): + request = service.UpgradeClusterRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if version is not None: + request.version = version + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.upgrade_cluster + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. 
+ self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + service.UpgradeClusterResponse, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + async def delete_cluster( self, request: Optional[Union[service.DeleteClusterRequest, dict]] = None, @@ -1059,6 +1198,132 @@ async def sample_promote_cluster(): # Done; return the response. return response + async def switchover_cluster( + self, + request: Optional[Union[service.SwitchoverClusterRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Switches the roles of PRIMARY and SECONDARY clusters + without any data loss. This promotes the SECONDARY + cluster to PRIMARY and sets up the original PRIMARY + cluster to replicate from this newly promoted cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1beta + + async def sample_switchover_cluster(): + # Create a client + client = alloydb_v1beta.AlloyDBAdminAsyncClient() + + # Initialize request argument(s) + request = alloydb_v1beta.SwitchoverClusterRequest( + name="name_value", + ) + + # Make the request + operation = client.switchover_cluster(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.alloydb_v1beta.types.SwitchoverClusterRequest, dict]]): + The request object. Message for switching over to a + cluster + name (:class:`str`): + Required. The name of the resource. + For the required format, see the comment + on the Cluster.name field + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.alloydb_v1beta.types.Cluster` A cluster is a collection of regional AlloyDB resources. It can include a + primary instance and one or more read pool instances. + All cluster resources share a storage layer, which + scales as needed. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.SwitchoverClusterRequest): + request = service.SwitchoverClusterRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.switchover_cluster + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + resources.Cluster, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + async def restore_cluster( self, request: Optional[Union[service.RestoreClusterRequest, dict]] = None, @@ -2605,6 +2870,162 @@ async def sample_restart_instance(): # Done; return the response. 
return response + async def execute_sql( + self, + request: Optional[Union[service.ExecuteSqlRequest, dict]] = None, + *, + instance: Optional[str] = None, + database: Optional[str] = None, + user: Optional[str] = None, + sql_statement: Optional[str] = None, + password: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> service.ExecuteSqlResponse: + r"""Executes a SQL statement in a database inside an + AlloyDB instance. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1beta + + async def sample_execute_sql(): + # Create a client + client = alloydb_v1beta.AlloyDBAdminAsyncClient() + + # Initialize request argument(s) + request = alloydb_v1beta.ExecuteSqlRequest( + password="password_value", + instance="instance_value", + database="database_value", + user="user_value", + sql_statement="sql_statement_value", + ) + + # Make the request + response = await client.execute_sql(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.alloydb_v1beta.types.ExecuteSqlRequest, dict]]): + The request object. Request for ExecuteSql rpc. + instance (:class:`str`): + Required. The instance where the SQL + will be executed. For the required + format, see the comment on the + Instance.name field. + + This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + database (:class:`str`): + Required. 
Name of the database where the query will be + executed. Note - Value provided should be the same as + expected from ``SELECT current_database();`` and NOT as + a resource reference. + + This corresponds to the ``database`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + user (:class:`str`): + Required. Database user to be used for executing the + SQL. Note - Value provided should be the same as + expected from ``SELECT current_user;`` and NOT as a + resource reference. + + This corresponds to the ``user`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + sql_statement (:class:`str`): + Required. SQL statement to execute on + database. Any valid statement is + permitted, including DDL, DML, DQL + statements. + + This corresponds to the ``sql_statement`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + password (:class:`str`): + Optional. The database native user’s + password. + + This corresponds to the ``password`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.alloydb_v1beta.types.ExecuteSqlResponse: + Execute a SQL statement response. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([instance, database, user, sql_statement, password]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.ExecuteSqlRequest): + request = service.ExecuteSqlRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if instance is not None: + request.instance = instance + if database is not None: + request.database = database + if user is not None: + request.user = user + if sql_statement is not None: + request.sql_statement = sql_statement + if password is not None: + request.password = password + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.execute_sql + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("instance", request.instance),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + async def list_backups( self, request: Optional[Union[service.ListBackupsRequest, dict]] = None, diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/client.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/client.py index 266026c341ee..34d74a6c8c3a 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/client.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/client.py @@ -59,7 +59,13 @@ from google.protobuf import timestamp_pb2 # type: ignore from google.cloud.alloydb_v1beta.services.alloy_db_admin import pagers -from google.cloud.alloydb_v1beta.types import resources, service +from google.cloud.alloydb_v1beta.types import ( + csql_resources, + data_model, + gemini, + resources, + service, +) from .transports.base import DEFAULT_CLIENT_INFO, AlloyDBAdminTransport from .transports.grpc import AlloyDBAdminGrpcTransport @@ -654,36 +660,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. 
- """ - - default_universe = AlloyDBAdminClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -693,13 +669,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or AlloyDBAdminClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): @@ -1377,6 +1349,136 @@ def sample_update_cluster(): # Done; return the response. return response + def upgrade_cluster( + self, + request: Optional[Union[service.UpgradeClusterRequest, dict]] = None, + *, + name: Optional[str] = None, + version: Optional[resources.DatabaseVersion] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Upgrades a single Cluster. + Imperative only. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1beta + + def sample_upgrade_cluster(): + # Create a client + client = alloydb_v1beta.AlloyDBAdminClient() + + # Initialize request argument(s) + request = alloydb_v1beta.UpgradeClusterRequest( + name="name_value", + version="POSTGRES_16", + ) + + # Make the request + operation = client.upgrade_cluster(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.alloydb_v1beta.types.UpgradeClusterRequest, dict]): + The request object. Upgrades a cluster. + name (str): + Required. The resource name of the + cluster. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + version (google.cloud.alloydb_v1beta.types.DatabaseVersion): + Required. The version the cluster is + going to be upgraded to. + + This corresponds to the ``version`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.alloydb_v1beta.types.UpgradeClusterResponse` + UpgradeClusterResponse contains the response for upgrade + cluster operation. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name, version]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.UpgradeClusterRequest): + request = service.UpgradeClusterRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if version is not None: + request.version = version + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.upgrade_cluster] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + service.UpgradeClusterResponse, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + def delete_cluster( self, request: Optional[Union[service.DeleteClusterRequest, dict]] = None, @@ -1624,6 +1726,129 @@ def sample_promote_cluster(): # Done; return the response. 
return response + def switchover_cluster( + self, + request: Optional[Union[service.SwitchoverClusterRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Switches the roles of PRIMARY and SECONDARY clusters + without any data loss. This promotes the SECONDARY + cluster to PRIMARY and sets up the original PRIMARY + cluster to replicate from this newly promoted cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1beta + + def sample_switchover_cluster(): + # Create a client + client = alloydb_v1beta.AlloyDBAdminClient() + + # Initialize request argument(s) + request = alloydb_v1beta.SwitchoverClusterRequest( + name="name_value", + ) + + # Make the request + operation = client.switchover_cluster(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.alloydb_v1beta.types.SwitchoverClusterRequest, dict]): + The request object. Message for switching over to a + cluster + name (str): + Required. The name of the resource. + For the required format, see the comment + on the Cluster.name field + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.alloydb_v1beta.types.Cluster` A cluster is a collection of regional AlloyDB resources. It can include a + primary instance and one or more read pool instances. + All cluster resources share a storage layer, which + scales as needed. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.SwitchoverClusterRequest): + request = service.SwitchoverClusterRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.switchover_cluster] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. 
+ response = operation.from_gapic( + response, + self._transport.operations_client, + resources.Cluster, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + def restore_cluster( self, request: Optional[Union[service.RestoreClusterRequest, dict]] = None, @@ -3138,6 +3363,159 @@ def sample_restart_instance(): # Done; return the response. return response + def execute_sql( + self, + request: Optional[Union[service.ExecuteSqlRequest, dict]] = None, + *, + instance: Optional[str] = None, + database: Optional[str] = None, + user: Optional[str] = None, + sql_statement: Optional[str] = None, + password: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> service.ExecuteSqlResponse: + r"""Executes a SQL statement in a database inside an + AlloyDB instance. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1beta + + def sample_execute_sql(): + # Create a client + client = alloydb_v1beta.AlloyDBAdminClient() + + # Initialize request argument(s) + request = alloydb_v1beta.ExecuteSqlRequest( + password="password_value", + instance="instance_value", + database="database_value", + user="user_value", + sql_statement="sql_statement_value", + ) + + # Make the request + response = client.execute_sql(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.alloydb_v1beta.types.ExecuteSqlRequest, dict]): + The request object. 
Request for ExecuteSql rpc. + instance (str): + Required. The instance where the SQL + will be executed. For the required + format, see the comment on the + Instance.name field. + + This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + database (str): + Required. Name of the database where the query will be + executed. Note - Value provided should be the same as + expected from ``SELECT current_database();`` and NOT as + a resource reference. + + This corresponds to the ``database`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + user (str): + Required. Database user to be used for executing the + SQL. Note - Value provided should be the same as + expected from ``SELECT current_user;`` and NOT as a + resource reference. + + This corresponds to the ``user`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + sql_statement (str): + Required. SQL statement to execute on + database. Any valid statement is + permitted, including DDL, DML, DQL + statements. + + This corresponds to the ``sql_statement`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + password (str): + Optional. The database native user’s + password. + + This corresponds to the ``password`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.alloydb_v1beta.types.ExecuteSqlResponse: + Execute a SQL statement response. + """ + # Create or coerce a protobuf request object. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([instance, database, user, sql_statement, password]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.ExecuteSqlRequest): + request = service.ExecuteSqlRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if instance is not None: + request.instance = instance + if database is not None: + request.database = database + if user is not None: + request.user = user + if sql_statement is not None: + request.sql_statement = sql_statement + if password is not None: + request.password = password + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.execute_sql] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("instance", request.instance),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + def list_backups( self, request: Optional[Union[service.ListBackupsRequest, dict]] = None, diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/transports/base.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/transports/base.py index 955fe8b4ac53..bf07dfc53095 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/transports/base.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/transports/base.py @@ -171,6 +171,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.upgrade_cluster: gapic_v1.method.wrap_method( + self.upgrade_cluster, + default_timeout=None, + client_info=client_info, + ), self.delete_cluster: gapic_v1.method.wrap_method( self.delete_cluster, default_timeout=None, @@ -181,6 +186,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.switchover_cluster: gapic_v1.method.wrap_method( + self.switchover_cluster, + default_timeout=None, + client_info=client_info, + ), self.restore_cluster: gapic_v1.method.wrap_method( self.restore_cluster, default_timeout=None, @@ -259,6 +269,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.execute_sql: gapic_v1.method.wrap_method( + self.execute_sql, + default_timeout=None, + client_info=client_info, + ), self.list_backups: gapic_v1.method.wrap_method( self.list_backups, default_retry=retries.Retry( @@ -483,6 +498,15 @@ def update_cluster( ]: raise NotImplementedError() + @property + def upgrade_cluster( + self, + ) -> Callable[ + [service.UpgradeClusterRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + @property def delete_cluster( self, @@ -501,6 +525,15 @@ def promote_cluster( ]: raise 
NotImplementedError() + @property + def switchover_cluster( + self, + ) -> Callable[ + [service.SwitchoverClusterRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + @property def restore_cluster( self, @@ -609,6 +642,15 @@ def restart_instance( ]: raise NotImplementedError() + @property + def execute_sql( + self, + ) -> Callable[ + [service.ExecuteSqlRequest], + Union[service.ExecuteSqlResponse, Awaitable[service.ExecuteSqlResponse]], + ]: + raise NotImplementedError() + @property def list_backups( self, diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/transports/grpc.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/transports/grpc.py index 8122bea7f5b1..0dab282cda4b 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/transports/grpc.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/transports/grpc.py @@ -357,6 +357,33 @@ def update_cluster( ) return self._stubs["update_cluster"] + @property + def upgrade_cluster( + self, + ) -> Callable[[service.UpgradeClusterRequest], operations_pb2.Operation]: + r"""Return a callable for the upgrade cluster method over gRPC. + + Upgrades a single Cluster. + Imperative only. + + Returns: + Callable[[~.UpgradeClusterRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "upgrade_cluster" not in self._stubs: + self._stubs["upgrade_cluster"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1beta.AlloyDBAdmin/UpgradeCluster", + request_serializer=service.UpgradeClusterRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["upgrade_cluster"] + @property def delete_cluster( self, @@ -412,6 +439,35 @@ def promote_cluster( ) return self._stubs["promote_cluster"] + @property + def switchover_cluster( + self, + ) -> Callable[[service.SwitchoverClusterRequest], operations_pb2.Operation]: + r"""Return a callable for the switchover cluster method over gRPC. + + Switches the roles of PRIMARY and SECONDARY clusters + without any data loss. This promotes the SECONDARY + cluster to PRIMARY and sets up the original PRIMARY + cluster to replicate from this newly promoted cluster. + + Returns: + Callable[[~.SwitchoverClusterRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "switchover_cluster" not in self._stubs: + self._stubs["switchover_cluster"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1beta.AlloyDBAdmin/SwitchoverCluster", + request_serializer=service.SwitchoverClusterRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["switchover_cluster"] + @property def restore_cluster( self, @@ -746,6 +802,33 @@ def restart_instance( ) return self._stubs["restart_instance"] + @property + def execute_sql( + self, + ) -> Callable[[service.ExecuteSqlRequest], service.ExecuteSqlResponse]: + r"""Return a callable for the execute sql method over gRPC. + + Executes a SQL statement in a database inside an + AlloyDB instance. 
+ + Returns: + Callable[[~.ExecuteSqlRequest], + ~.ExecuteSqlResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "execute_sql" not in self._stubs: + self._stubs["execute_sql"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1beta.AlloyDBAdmin/ExecuteSql", + request_serializer=service.ExecuteSqlRequest.serialize, + response_deserializer=service.ExecuteSqlResponse.deserialize, + ) + return self._stubs["execute_sql"] + @property def list_backups( self, diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/transports/grpc_asyncio.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/transports/grpc_asyncio.py index cec93427c5b0..5b7012e7a607 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/transports/grpc_asyncio.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/transports/grpc_asyncio.py @@ -371,6 +371,33 @@ def update_cluster( ) return self._stubs["update_cluster"] + @property + def upgrade_cluster( + self, + ) -> Callable[[service.UpgradeClusterRequest], Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the upgrade cluster method over gRPC. + + Upgrades a single Cluster. + Imperative only. + + Returns: + Callable[[~.UpgradeClusterRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "upgrade_cluster" not in self._stubs: + self._stubs["upgrade_cluster"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1beta.AlloyDBAdmin/UpgradeCluster", + request_serializer=service.UpgradeClusterRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["upgrade_cluster"] + @property def delete_cluster( self, @@ -426,6 +453,37 @@ def promote_cluster( ) return self._stubs["promote_cluster"] + @property + def switchover_cluster( + self, + ) -> Callable[ + [service.SwitchoverClusterRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the switchover cluster method over gRPC. + + Switches the roles of PRIMARY and SECONDARY clusters + without any data loss. This promotes the SECONDARY + cluster to PRIMARY and sets up the original PRIMARY + cluster to replicate from this newly promoted cluster. + + Returns: + Callable[[~.SwitchoverClusterRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "switchover_cluster" not in self._stubs: + self._stubs["switchover_cluster"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1beta.AlloyDBAdmin/SwitchoverCluster", + request_serializer=service.SwitchoverClusterRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["switchover_cluster"] + @property def restore_cluster( self, @@ -772,6 +830,33 @@ def restart_instance( ) return self._stubs["restart_instance"] + @property + def execute_sql( + self, + ) -> Callable[[service.ExecuteSqlRequest], Awaitable[service.ExecuteSqlResponse]]: + r"""Return a callable for the execute sql method over gRPC. + + Executes a SQL statement in a database inside an + AlloyDB instance. 
+ + Returns: + Callable[[~.ExecuteSqlRequest], + Awaitable[~.ExecuteSqlResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "execute_sql" not in self._stubs: + self._stubs["execute_sql"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1beta.AlloyDBAdmin/ExecuteSql", + request_serializer=service.ExecuteSqlRequest.serialize, + response_deserializer=service.ExecuteSqlResponse.deserialize, + ) + return self._stubs["execute_sql"] + @property def list_backups( self, @@ -1195,6 +1280,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.upgrade_cluster: self._wrap_method( + self.upgrade_cluster, + default_timeout=None, + client_info=client_info, + ), self.delete_cluster: self._wrap_method( self.delete_cluster, default_timeout=None, @@ -1205,6 +1295,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.switchover_cluster: self._wrap_method( + self.switchover_cluster, + default_timeout=None, + client_info=client_info, + ), self.restore_cluster: self._wrap_method( self.restore_cluster, default_timeout=None, @@ -1283,6 +1378,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.execute_sql: self._wrap_method( + self.execute_sql, + default_timeout=None, + client_info=client_info, + ), self.list_backups: self._wrap_method( self.list_backups, default_retry=retries.AsyncRetry( diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/transports/rest.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/transports/rest.py index 7b70c49f05d6..91964a1c55e8 100644 --- 
a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/transports/rest.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/transports/rest.py @@ -149,6 +149,14 @@ def pre_delete_user(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata + def pre_execute_sql(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_execute_sql(self, response): + logging.log(f"Received response: {response}") + return response + def pre_failover_instance(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -285,6 +293,14 @@ def post_restore_cluster(self, response): logging.log(f"Received response: {response}") return response + def pre_switchover_cluster(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_switchover_cluster(self, response): + logging.log(f"Received response: {response}") + return response + def pre_update_backup(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -317,6 +333,14 @@ def post_update_user(self, response): logging.log(f"Received response: {response}") return response + def pre_upgrade_cluster(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_upgrade_cluster(self, response): + logging.log(f"Received response: {response}") + return response + transport = AlloyDBAdminRestTransport(interceptor=MyCustomAlloyDBAdminInterceptor()) client = AlloyDBAdminClient(transport=transport) @@ -551,6 +575,27 @@ def pre_delete_user( """ return request, metadata + def pre_execute_sql( + self, request: service.ExecuteSqlRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[service.ExecuteSqlRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for execute_sql + + Override in a subclass to 
manipulate the request or metadata + before they are sent to the AlloyDBAdmin server. + """ + return request, metadata + + def post_execute_sql( + self, response: service.ExecuteSqlResponse + ) -> service.ExecuteSqlResponse: + """Post-rpc interceptor for execute_sql + + Override in a subclass to manipulate the response + after it is returned by the AlloyDBAdmin server but before + it is returned to user code. + """ + return response + def pre_failover_instance( self, request: service.FailoverInstanceRequest, @@ -914,6 +959,29 @@ def post_restore_cluster( """ return response + def pre_switchover_cluster( + self, + request: service.SwitchoverClusterRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[service.SwitchoverClusterRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for switchover_cluster + + Override in a subclass to manipulate the request or metadata + before they are sent to the AlloyDBAdmin server. + """ + return request, metadata + + def post_switchover_cluster( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for switchover_cluster + + Override in a subclass to manipulate the response + after it is returned by the AlloyDBAdmin server but before + it is returned to user code. + """ + return response + def pre_update_backup( self, request: service.UpdateBackupRequest, metadata: Sequence[Tuple[str, str]] ) -> Tuple[service.UpdateBackupRequest, Sequence[Tuple[str, str]]]: @@ -998,6 +1066,29 @@ def post_update_user(self, response: resources.User) -> resources.User: """ return response + def pre_upgrade_cluster( + self, + request: service.UpgradeClusterRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[service.UpgradeClusterRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for upgrade_cluster + + Override in a subclass to manipulate the request or metadata + before they are sent to the AlloyDBAdmin server. 
+ """ + return request, metadata + + def post_upgrade_cluster( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for upgrade_cluster + + Override in a subclass to manipulate the response + after it is returned by the AlloyDBAdmin server but before + it is returned to user code. + """ + return response + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -2304,6 +2395,106 @@ def __call__( if response.status_code >= 400: raise core_exceptions.from_http_response(response) + class _ExecuteSql( + _BaseAlloyDBAdminRestTransport._BaseExecuteSql, AlloyDBAdminRestStub + ): + def __hash__(self): + return hash("AlloyDBAdminRestTransport.ExecuteSql") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: service.ExecuteSqlRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> service.ExecuteSqlResponse: + r"""Call the execute sql method over HTTP. + + Args: + request (~.service.ExecuteSqlRequest): + The request object. Request for ExecuteSql rpc. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.service.ExecuteSqlResponse: + Execute a SQL statement response. 
+ """ + + http_options = ( + _BaseAlloyDBAdminRestTransport._BaseExecuteSql._get_http_options() + ) + request, metadata = self._interceptor.pre_execute_sql(request, metadata) + transcoded_request = ( + _BaseAlloyDBAdminRestTransport._BaseExecuteSql._get_transcoded_request( + http_options, request + ) + ) + + body = ( + _BaseAlloyDBAdminRestTransport._BaseExecuteSql._get_request_body_json( + transcoded_request + ) + ) + + # Jsonify the query params + query_params = ( + _BaseAlloyDBAdminRestTransport._BaseExecuteSql._get_query_params_json( + transcoded_request + ) + ) + + # Send the request + response = AlloyDBAdminRestTransport._ExecuteSql._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = service.ExecuteSqlResponse() + pb_resp = service.ExecuteSqlResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_execute_sql(resp) + return resp + class _FailoverInstance( _BaseAlloyDBAdminRestTransport._BaseFailoverInstance, AlloyDBAdminRestStub ): @@ -3925,6 +4116,104 @@ def __call__( resp = self._interceptor.post_restore_cluster(resp) return resp + class _SwitchoverCluster( + _BaseAlloyDBAdminRestTransport._BaseSwitchoverCluster, AlloyDBAdminRestStub + ): + def __hash__(self): + return hash("AlloyDBAdminRestTransport.SwitchoverCluster") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, 
+ headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: service.SwitchoverClusterRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the switchover cluster method over HTTP. + + Args: + request (~.service.SwitchoverClusterRequest): + The request object. Message for switching over to a + cluster + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = ( + _BaseAlloyDBAdminRestTransport._BaseSwitchoverCluster._get_http_options() + ) + request, metadata = self._interceptor.pre_switchover_cluster( + request, metadata + ) + transcoded_request = _BaseAlloyDBAdminRestTransport._BaseSwitchoverCluster._get_transcoded_request( + http_options, request + ) + + body = _BaseAlloyDBAdminRestTransport._BaseSwitchoverCluster._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseAlloyDBAdminRestTransport._BaseSwitchoverCluster._get_query_params_json( + transcoded_request + ) + + # Send the request + response = AlloyDBAdminRestTransport._SwitchoverCluster._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_switchover_cluster(resp) + return resp + class _UpdateBackup( _BaseAlloyDBAdminRestTransport._BaseUpdateBackup, AlloyDBAdminRestStub ): @@ -4314,6 +4603,101 @@ def __call__( resp = self._interceptor.post_update_user(resp) return resp + class _UpgradeCluster( + _BaseAlloyDBAdminRestTransport._BaseUpgradeCluster, AlloyDBAdminRestStub + ): + def __hash__(self): + return hash("AlloyDBAdminRestTransport.UpgradeCluster") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: service.UpgradeClusterRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the upgrade cluster method over HTTP. + + Args: + request (~.service.UpgradeClusterRequest): + The request object. Upgrades a cluster. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options = ( + _BaseAlloyDBAdminRestTransport._BaseUpgradeCluster._get_http_options() + ) + request, metadata = self._interceptor.pre_upgrade_cluster(request, metadata) + transcoded_request = _BaseAlloyDBAdminRestTransport._BaseUpgradeCluster._get_transcoded_request( + http_options, request + ) + + body = _BaseAlloyDBAdminRestTransport._BaseUpgradeCluster._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseAlloyDBAdminRestTransport._BaseUpgradeCluster._get_query_params_json( + transcoded_request + ) + + # Send the request + response = AlloyDBAdminRestTransport._UpgradeCluster._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_upgrade_cluster(resp) + return resp + @property def batch_create_instances( self, @@ -4398,6 +4782,14 @@ def delete_user(self) -> Callable[[service.DeleteUserRequest], empty_pb2.Empty]: # In C++ this would require a dynamic_cast return self._DeleteUser(self._session, self._host, self._interceptor) # type: ignore + @property + def execute_sql( + self, + ) -> Callable[[service.ExecuteSqlRequest], service.ExecuteSqlResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ExecuteSql(self._session, self._host, self._interceptor) # type: ignore + @property def failover_instance( self, @@ -4534,6 +4926,14 @@ def restore_cluster( # In C++ this would require a dynamic_cast return self._RestoreCluster(self._session, self._host, self._interceptor) # type: ignore + @property + def switchover_cluster( + self, + ) -> Callable[[service.SwitchoverClusterRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SwitchoverCluster(self._session, self._host, self._interceptor) # type: ignore + @property def update_backup( self, @@ -4564,6 +4964,14 @@ def update_user(self) -> Callable[[service.UpdateUserRequest], resources.User]: # In C++ this would require a dynamic_cast return self._UpdateUser(self._session, self._host, self._interceptor) # type: ignore + @property + def upgrade_cluster( + self, + ) -> Callable[[service.UpgradeClusterRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._UpgradeCluster(self._session, self._host, self._interceptor) # type: ignore + @property def get_location(self): return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/transports/rest_base.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/transports/rest_base.py index b4096bdbb053..a14a0f39bb7d 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/transports/rest_base.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/transports/rest_base.py @@ -691,6 +691,63 @@ def _get_query_params_json(transcoded_request): query_params["$alt"] = "json;enum-encoding=int" return query_params + class _BaseExecuteSql: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta/{instance=projects/*/locations/*/clusters/*/instances/*}:executeSql", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = service.ExecuteSqlRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): 
+ query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseAlloyDBAdminRestTransport._BaseExecuteSql._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + class _BaseFailoverInstance: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") @@ -1550,6 +1607,63 @@ def _get_query_params_json(transcoded_request): query_params["$alt"] = "json;enum-encoding=int" return query_params + class _BaseSwitchoverCluster: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta/{name=projects/*/locations/*/clusters/*}:switchover", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = service.SwitchoverClusterRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseAlloyDBAdminRestTransport._BaseSwitchoverCluster._get_unset_required_fields( + query_params + ) + ) + + 
query_params["$alt"] = "json;enum-encoding=int" + return query_params + class _BaseUpdateBackup: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") @@ -1778,6 +1892,63 @@ def _get_query_params_json(transcoded_request): query_params["$alt"] = "json;enum-encoding=int" return query_params + class _BaseUpgradeCluster: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1beta/{name=projects/*/locations/*/clusters/*}:upgrade", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = service.UpgradeClusterRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseAlloyDBAdminRestTransport._BaseUpgradeCluster._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + class _BaseGetLocation: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/types/__init__.py 
b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/types/__init__.py index 262b798ccfc1..c53192fde938 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/types/__init__.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/types/__init__.py @@ -13,6 +13,9 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from .csql_resources import CloudSQLBackupRunSource +from .data_model import SqlResult, SqlResultColumn, SqlResultRow, SqlResultValue +from .gemini import GeminiClusterConfig, GeminiInstanceConfig from .resources import ( AutomatedBackupPolicy, Backup, @@ -33,6 +36,7 @@ MaintenanceUpdatePolicy, MigrationSource, SslConfig, + SubscriptionType, SupportedDatabaseFlag, User, UserPassword, @@ -53,6 +57,9 @@ DeleteClusterRequest, DeleteInstanceRequest, DeleteUserRequest, + ExecuteSqlMetadata, + ExecuteSqlRequest, + ExecuteSqlResponse, FailoverInstanceRequest, GenerateClientCertificateRequest, GenerateClientCertificateResponse, @@ -76,15 +83,27 @@ ListUsersResponse, OperationMetadata, PromoteClusterRequest, + PromoteClusterStatus, RestartInstanceRequest, RestoreClusterRequest, + SwitchoverClusterRequest, UpdateBackupRequest, UpdateClusterRequest, UpdateInstanceRequest, UpdateUserRequest, + UpgradeClusterRequest, + UpgradeClusterResponse, + UpgradeClusterStatus, ) __all__ = ( + "CloudSQLBackupRunSource", + "SqlResult", + "SqlResultColumn", + "SqlResultRow", + "SqlResultValue", + "GeminiClusterConfig", + "GeminiInstanceConfig", "AutomatedBackupPolicy", "Backup", "BackupSource", @@ -107,6 +126,7 @@ "ClusterView", "DatabaseVersion", "InstanceView", + "SubscriptionType", "BatchCreateInstancesMetadata", "BatchCreateInstancesRequest", "BatchCreateInstancesResponse", @@ -122,6 +142,9 @@ "DeleteClusterRequest", "DeleteInstanceRequest", "DeleteUserRequest", + "ExecuteSqlMetadata", + "ExecuteSqlRequest", + "ExecuteSqlResponse", "FailoverInstanceRequest", 
"GenerateClientCertificateRequest", "GenerateClientCertificateResponse", @@ -145,10 +168,15 @@ "ListUsersResponse", "OperationMetadata", "PromoteClusterRequest", + "PromoteClusterStatus", "RestartInstanceRequest", "RestoreClusterRequest", + "SwitchoverClusterRequest", "UpdateBackupRequest", "UpdateClusterRequest", "UpdateInstanceRequest", "UpdateUserRequest", + "UpgradeClusterRequest", + "UpgradeClusterResponse", + "UpgradeClusterStatus", ) diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/types/csql_resources.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/types/csql_resources.py new file mode 100644 index 000000000000..801695fb5bc5 --- /dev/null +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/types/csql_resources.py @@ -0,0 +1,58 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.alloydb.v1beta", + manifest={ + "CloudSQLBackupRunSource", + }, +) + + +class CloudSQLBackupRunSource(proto.Message): + r"""The source CloudSQL backup resource. + + Attributes: + project (str): + The project ID of the source CloudSQL + instance. This should be the same as the AlloyDB + cluster's project. + instance_id (str): + Required. The CloudSQL instance ID. 
+ backup_run_id (int): + Required. The CloudSQL backup run ID. + """ + + project: str = proto.Field( + proto.STRING, + number=1, + ) + instance_id: str = proto.Field( + proto.STRING, + number=2, + ) + backup_run_id: int = proto.Field( + proto.INT64, + number=3, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/types/data_model.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/types/data_model.py new file mode 100644 index 000000000000..deff388702da --- /dev/null +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/types/data_model.py @@ -0,0 +1,125 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.alloydb.v1beta", + manifest={ + "SqlResult", + "SqlResultColumn", + "SqlResultRow", + "SqlResultValue", + }, +) + + +class SqlResult(proto.Message): + r"""SqlResult represents the result for the execution of a sql + statement. + + Attributes: + columns (MutableSequence[google.cloud.alloydb_v1beta.types.SqlResultColumn]): + List of columns included in the result. This + also includes the data type of the column. + rows (MutableSequence[google.cloud.alloydb_v1beta.types.SqlResultRow]): + Rows returned by the SQL statement. 
+ """ + + columns: MutableSequence["SqlResultColumn"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="SqlResultColumn", + ) + rows: MutableSequence["SqlResultRow"] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="SqlResultRow", + ) + + +class SqlResultColumn(proto.Message): + r"""Contains the name and datatype of a column in a SQL Result. + + Attributes: + name (str): + Name of the column. + type_ (str): + Datatype of the column as reported by the + postgres driver. Common type names are + "VARCHAR", "TEXT", "NVARCHAR", "DECIMAL", + "BOOL", "INT", and "BIGINT". + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + type_: str = proto.Field( + proto.STRING, + number=2, + ) + + +class SqlResultRow(proto.Message): + r"""A single row from a sql result. + + Attributes: + values (MutableSequence[google.cloud.alloydb_v1beta.types.SqlResultValue]): + List of values in a row of sql result. + """ + + values: MutableSequence["SqlResultValue"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="SqlResultValue", + ) + + +class SqlResultValue(proto.Message): + r"""A single value in a row from a sql result. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + value (str): + The cell value represented in string format. + Timestamps are converted to string using + RFC3339Nano format. + + This field is a member of `oneof`_ ``_value``. + null_value (bool): + Set to true if cell value is null. + + This field is a member of `oneof`_ ``_null_value``. 
+ """ + + value: str = proto.Field( + proto.STRING, + number=1, + optional=True, + ) + null_value: bool = proto.Field( + proto.BOOL, + number=2, + optional=True, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/types/gemini.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/types/gemini.py new file mode 100644 index 000000000000..37172f861a55 --- /dev/null +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/types/gemini.py @@ -0,0 +1,73 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.alloydb.v1beta", + manifest={ + "GeminiClusterConfig", + "GeminiInstanceConfig", + }, +) + + +class GeminiClusterConfig(proto.Message): + r"""Cluster level configuration parameters related to the Gemini + in Databases add-on. + + Attributes: + entitled (bool): + Output only. Whether the Gemini in Databases + add-on is enabled for the cluster. It will be + true only if the add-on has been enabled for the + billing account corresponding to the cluster. + Its status is toggled from the Admin Control + Center (ACC) and cannot be toggled using + AlloyDB's APIs. 
+ """ + + entitled: bool = proto.Field( + proto.BOOL, + number=1, + ) + + +class GeminiInstanceConfig(proto.Message): + r"""Instance level configuration parameters related to the Gemini + in Databases add-on. + + Attributes: + entitled (bool): + Output only. Whether the Gemini in Databases + add-on is enabled for the instance. It will be + true only if the add-on has been enabled for the + billing account corresponding to the instance. + Its status is toggled from the Admin Control + Center (ACC) and cannot be toggled using + AlloyDB's APIs. + """ + + entitled: bool = proto.Field( + proto.BOOL, + number=1, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/types/resources.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/types/resources.py index 9cf140f311a4..6284398ae73e 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/types/resources.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/types/resources.py @@ -24,12 +24,15 @@ from google.type import timeofday_pb2 # type: ignore import proto # type: ignore +from google.cloud.alloydb_v1beta.types import csql_resources, gemini + __protobuf__ = proto.module( package="google.cloud.alloydb.v1beta", manifest={ "InstanceView", "ClusterView", "DatabaseVersion", + "SubscriptionType", "UserPassword", "MigrationSource", "EncryptionConfig", @@ -113,11 +116,33 @@ class DatabaseVersion(proto.Enum): The database version is Postgres 14. POSTGRES_15 (3): The database version is Postgres 15. + POSTGRES_16 (4): + The database version is Postgres 16. """ DATABASE_VERSION_UNSPECIFIED = 0 POSTGRES_13 = 1 POSTGRES_14 = 2 POSTGRES_15 = 3 + POSTGRES_16 = 4 + + +class SubscriptionType(proto.Enum): + r"""Subscription_type added to distinguish between Standard and Trial + subscriptions. By default, a subscription type is considered + STANDARD unless explicitly specified. 
+ + Values: + SUBSCRIPTION_TYPE_UNSPECIFIED (0): + This is an unknown subscription type. By + default, the subscription type is STANDARD. + STANDARD (1): + Standard subscription. + TRIAL (2): + Trial subscription. + """ + SUBSCRIPTION_TYPE_UNSPECIFIED = 0 + STANDARD = 1 + TRIAL = 2 class UserPassword(proto.Message): @@ -266,7 +291,7 @@ class SslMode(proto.Enum): Values: SSL_MODE_UNSPECIFIED (0): - SSL mode not specified. Defaults to ENCRYPTED_ONLY. + SSL mode is not specified. Defaults to ENCRYPTED_ONLY. SSL_MODE_ALLOW (1): SSL connections are optional. CA verification not enforced. @@ -278,7 +303,7 @@ class SslMode(proto.Enum): SSL_MODE_VERIFY_CA (3): SSL connections are required. CA verification enforced. Clients must have certificates signed - by a Cluster CA, e.g. via + by a Cluster CA, for example, using GenerateClientCertificate. ALLOW_UNENCRYPTED_AND_ENCRYPTED (4): SSL connections are optional. CA verification @@ -709,6 +734,11 @@ class Cluster(proto.Message): Output only. Cluster created via DMS migration. + This field is a member of `oneof`_ ``source``. + cloudsql_backup_run_source (google.cloud.alloydb_v1beta.types.CloudSQLBackupRunSource): + Output only. Cluster created from CloudSQL + snapshot. + This field is a member of `oneof`_ ``source``. name (str): Output only. The name of the cluster resource with the @@ -817,6 +847,9 @@ class Cluster(proto.Message): specific to PRIMARY cluster. satisfies_pzs (bool): Output only. Reserved for future use. + psc_config (google.cloud.alloydb_v1beta.types.Cluster.PscConfig): + Optional. The configuration for Private + Service Connect (PSC) for the cluster. maintenance_update_policy (google.cloud.alloydb_v1beta.types.MaintenanceUpdatePolicy): Optional. The maintenance update policy determines when to allow or deny updates. @@ -824,6 +857,21 @@ class Cluster(proto.Message): Output only. The maintenance schedule for the cluster, generated for a specific rollout if a maintenance window is set. 
+ gemini_config (google.cloud.alloydb_v1beta.types.GeminiClusterConfig): + Optional. Configuration parameters related to + the Gemini in Databases add-on. + subscription_type (google.cloud.alloydb_v1beta.types.SubscriptionType): + Optional. Subscription type of the cluster. + trial_metadata (google.cloud.alloydb_v1beta.types.Cluster.TrialMetadata): + Output only. Metadata for free trial clusters + tags (MutableMapping[str, str]): + Optional. Input only. Immutable. Tag keys/values directly + bound to this resource. For example: + + :: + + "123/environment": "production", + "123/costCenter": "marketing". """ class State(proto.Enum): @@ -956,6 +1004,59 @@ class PrimaryConfig(proto.Message): number=1, ) + class PscConfig(proto.Message): + r"""PscConfig contains PSC related configuration at a cluster + level. + + Attributes: + psc_enabled (bool): + Optional. Create an instance that allows + connections from Private Service Connect + endpoints to the instance. + """ + + psc_enabled: bool = proto.Field( + proto.BOOL, + number=1, + ) + + class TrialMetadata(proto.Message): + r"""Contains information and all metadata related to TRIAL + clusters. + + Attributes: + start_time (google.protobuf.timestamp_pb2.Timestamp): + start time of the trial cluster. + end_time (google.protobuf.timestamp_pb2.Timestamp): + End time of the trial cluster. + upgrade_time (google.protobuf.timestamp_pb2.Timestamp): + Upgrade time of trial cluster to Standard + cluster. + grace_end_time (google.protobuf.timestamp_pb2.Timestamp): + grace end time of the cluster. 
+ """ + + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + upgrade_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + grace_end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + backup_source: "BackupSource" = proto.Field( proto.MESSAGE, number=15, @@ -968,6 +1069,12 @@ class PrimaryConfig(proto.Message): oneof="source", message="MigrationSource", ) + cloudsql_backup_run_source: csql_resources.CloudSQLBackupRunSource = proto.Field( + proto.MESSAGE, + number=42, + oneof="source", + message=csql_resources.CloudSQLBackupRunSource, + ) name: str = proto.Field( proto.STRING, number=1, @@ -1086,6 +1193,11 @@ class PrimaryConfig(proto.Message): proto.BOOL, number=30, ) + psc_config: PscConfig = proto.Field( + proto.MESSAGE, + number=31, + message=PscConfig, + ) maintenance_update_policy: "MaintenanceUpdatePolicy" = proto.Field( proto.MESSAGE, number=32, @@ -1096,6 +1208,26 @@ class PrimaryConfig(proto.Message): number=37, message="MaintenanceSchedule", ) + gemini_config: gemini.GeminiClusterConfig = proto.Field( + proto.MESSAGE, + number=36, + message=gemini.GeminiClusterConfig, + ) + subscription_type: "SubscriptionType" = proto.Field( + proto.ENUM, + number=38, + enum="SubscriptionType", + ) + trial_metadata: TrialMetadata = proto.Field( + proto.MESSAGE, + number=39, + message=TrialMetadata, + ) + tags: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=41, + ) class Instance(proto.Message): @@ -1184,6 +1316,8 @@ class Instance(proto.Message): PRIMARY instance. query_insights_config (google.cloud.alloydb_v1beta.types.Instance.QueryInsightsInstanceConfig): Configuration for query insights. 
+ observability_config (google.cloud.alloydb_v1beta.types.Instance.ObservabilityInstanceConfig): + Configuration for observability. read_pool_config (google.cloud.alloydb_v1beta.types.Instance.ReadPoolConfig): Read pool instance configuration. This is required if the value of instanceType is READ_POOL. @@ -1227,8 +1361,11 @@ class Instance(proto.Message): Optional. The configuration for Private Service Connect (PSC) for the instance. network_config (google.cloud.alloydb_v1beta.types.Instance.InstanceNetworkConfig): - Optional. Instance level network + Optional. Instance-level network configuration. + gemini_config (google.cloud.alloydb_v1beta.types.GeminiInstanceConfig): + Optional. Configuration parameters related to + the Gemini in Databases add-on. outbound_public_ip_addresses (MutableSequence[str]): Output only. All outbound public IP addresses configured for the instance. @@ -1427,6 +1564,114 @@ class QueryInsightsInstanceConfig(proto.Message): optional=True, ) + class ObservabilityInstanceConfig(proto.Message): + r"""Observability Instance specific configuration. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + enabled (bool): + Observability feature status for an instance. + This flag is turned "off" by default. + + This field is a member of `oneof`_ ``_enabled``. + preserve_comments (bool): + Preserve comments in query string for an + instance. This flag is turned "off" by default. + + This field is a member of `oneof`_ ``_preserve_comments``. + track_wait_events (bool): + Track wait events during query execution for + an instance. This flag is turned "on" by default + but tracking is enabled only after observability + enabled flag is also turned on. + + This field is a member of `oneof`_ ``_track_wait_events``. + track_wait_event_types (bool): + Output only. Track wait event types during + query execution for an instance. 
This flag is + turned "on" by default but tracking is enabled + only after observability enabled flag is also + turned on. This is read-only flag and only + modifiable by producer API. + + This field is a member of `oneof`_ ``_track_wait_event_types``. + max_query_string_length (int): + Query string length. The default value is + 10k. + + This field is a member of `oneof`_ ``_max_query_string_length``. + record_application_tags (bool): + Record application tags for an instance. + This flag is turned "off" by default. + + This field is a member of `oneof`_ ``_record_application_tags``. + query_plans_per_minute (int): + Number of query execution plans captured by + Insights per minute for all queries combined. + The default value is 200. Any integer between 0 + to 200 is considered valid. + + This field is a member of `oneof`_ ``_query_plans_per_minute``. + track_active_queries (bool): + Track actively running queries on the + instance. If not set, this flag is "off" by + default. + + This field is a member of `oneof`_ ``_track_active_queries``. + track_client_address (bool): + Track client address for an instance. + If not set, default value is "off". + + This field is a member of `oneof`_ ``_track_client_address``. 
+ """ + + enabled: bool = proto.Field( + proto.BOOL, + number=1, + optional=True, + ) + preserve_comments: bool = proto.Field( + proto.BOOL, + number=2, + optional=True, + ) + track_wait_events: bool = proto.Field( + proto.BOOL, + number=3, + optional=True, + ) + track_wait_event_types: bool = proto.Field( + proto.BOOL, + number=4, + optional=True, + ) + max_query_string_length: int = proto.Field( + proto.INT32, + number=5, + optional=True, + ) + record_application_tags: bool = proto.Field( + proto.BOOL, + number=6, + optional=True, + ) + query_plans_per_minute: int = proto.Field( + proto.INT32, + number=7, + optional=True, + ) + track_active_queries: bool = proto.Field( + proto.BOOL, + number=8, + optional=True, + ) + track_client_address: bool = proto.Field( + proto.BOOL, + number=9, + optional=True, + ) + class ReadPoolConfig(proto.Message): r"""Configuration for a read pool instance. @@ -1480,7 +1725,7 @@ class ClientConnectionConfig(proto.Message): only (ex: AuthProxy) connections to the database. ssl_config (google.cloud.alloydb_v1beta.types.SslConfig): - Optional. SSL config option for this + Optional. SSL configuration option for this instance. """ @@ -1494,36 +1739,6 @@ class ClientConnectionConfig(proto.Message): message="SslConfig", ) - class PscInterfaceConfig(proto.Message): - r"""Configuration for setting up a PSC interface. This - information needs to be provided by the customer. - PSC interfaces will be created and added to VMs via SLM (adding - a network interface will require recreating the VM). For HA - instances this will be done via LDTM. - - Attributes: - consumer_endpoint_ips (MutableSequence[str]): - A list of endpoints in the consumer VPC the - interface might initiate outbound connections - to. This list has to be provided when the PSC - interface is created. 
- network_attachment (str): - The NetworkAttachment resource created in the consumer VPC - to which the PSC interface will be linked, in the form of: - ``projects/${CONSUMER_PROJECT}/regions/${REGION}/networkAttachments/${NETWORK_ATTACHMENT_NAME}``. - NetworkAttachment has to be provided when the PSC interface - is created. - """ - - consumer_endpoint_ips: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=1, - ) - network_attachment: str = proto.Field( - proto.STRING, - number=2, - ) - class PscInstanceConfig(proto.Message): r"""PscInstanceConfig contains PSC related configuration at an instance level. @@ -1538,27 +1753,10 @@ class PscInstanceConfig(proto.Message): Optional. List of consumer projects that are allowed to create PSC endpoints to service-attachments to this instance. - allowed_consumer_networks (MutableSequence[str]): - Optional. List of consumer networks that are - allowed to create PSC endpoints to - service-attachments to this instance. - psc_interface_configs (MutableSequence[google.cloud.alloydb_v1beta.types.Instance.PscInterfaceConfig]): - Optional. Configurations for setting up PSC - interfaces attached to the instance which are - used for outbound connectivity. Only primary - instances can have PSC interface attached. All - the VMs created for the primary instance will - share the same configurations. Currently we only - support 0 or 1 PSC interface. - outgoing_service_attachment_links (MutableSequence[str]): - Optional. List of service attachments that - this instance has created endpoints to connect - with. Currently, only a single outgoing service - attachment is supported per instance. - psc_enabled (bool): - Optional. Whether PSC connectivity is enabled - for this instance. This is populated by - referencing the value from the parent cluster. + psc_dns_name (str): + Output only. The DNS name of the instance for + PSC connectivity. 
Name convention: + ...alloydb-psc.goog """ service_attachment_link: str = proto.Field( @@ -1569,28 +1767,13 @@ class PscInstanceConfig(proto.Message): proto.STRING, number=2, ) - allowed_consumer_networks: MutableSequence[str] = proto.RepeatedField( + psc_dns_name: str = proto.Field( proto.STRING, - number=3, - ) - psc_interface_configs: MutableSequence[ - "Instance.PscInterfaceConfig" - ] = proto.RepeatedField( - proto.MESSAGE, - number=4, - message="Instance.PscInterfaceConfig", - ) - outgoing_service_attachment_links: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=5, - ) - psc_enabled: bool = proto.Field( - proto.BOOL, - number=6, + number=7, ) class InstanceNetworkConfig(proto.Message): - r"""Metadata related to instance level network configuration. + r"""Metadata related to instance-level network configuration. Attributes: authorized_external_networks (MutableSequence[google.cloud.alloydb_v1beta.types.Instance.InstanceNetworkConfig.AuthorizedNetwork]): @@ -1712,6 +1895,11 @@ class AuthorizedNetwork(proto.Message): number=21, message=QueryInsightsInstanceConfig, ) + observability_config: ObservabilityInstanceConfig = proto.Field( + proto.MESSAGE, + number=26, + message=ObservabilityInstanceConfig, + ) read_pool_config: ReadPoolConfig = proto.Field( proto.MESSAGE, number=14, @@ -1762,6 +1950,11 @@ class AuthorizedNetwork(proto.Message): number=29, message=InstanceNetworkConfig, ) + gemini_config: gemini.GeminiInstanceConfig = proto.Field( + proto.MESSAGE, + number=33, + message=gemini.GeminiInstanceConfig, + ) outbound_public_ip_addresses: MutableSequence[str] = proto.RepeatedField( proto.STRING, number=34, @@ -1915,6 +2108,14 @@ class Backup(proto.Message): version of the cluster this backup was created from. Any restored cluster created from this backup will have the same database version. + tags (MutableMapping[str, str]): + Optional. Input only. Immutable. Tag keys/values directly + bound to this resource. 
For example: + + :: + + "123/environment": "production", + "123/costCenter": "marketing". """ class State(proto.Enum): @@ -2096,6 +2297,11 @@ class QuantityBasedExpiry(proto.Message): number=22, enum="DatabaseVersion", ) + tags: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=25, + ) class SupportedDatabaseFlag(proto.Message): @@ -2267,6 +2473,9 @@ class User(proto.Message): the PostgreSQL naming conventions. user_type (google.cloud.alloydb_v1beta.types.User.UserType): Optional. Type of this user. + keep_extra_roles (bool): + Input only. If the user already exists and it + has additional roles, keep them granted. """ class UserType(proto.Enum): @@ -2303,6 +2512,10 @@ class UserType(proto.Enum): number=5, enum=UserType, ) + keep_extra_roles: bool = proto.Field( + proto.BOOL, + number=6, + ) class Database(proto.Message): diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/types/service.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/types/service.py index f9f1e3cb8b9b..40d1e9efa629 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/types/service.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/types/service.py @@ -23,7 +23,7 @@ from google.rpc import status_pb2 # type: ignore import proto # type: ignore -from google.cloud.alloydb_v1beta.types import resources +from google.cloud.alloydb_v1beta.types import data_model, resources __protobuf__ = proto.module( package="google.cloud.alloydb.v1beta", @@ -34,7 +34,10 @@ "CreateSecondaryClusterRequest", "CreateClusterRequest", "UpdateClusterRequest", + "UpgradeClusterRequest", + "UpgradeClusterResponse", "DeleteClusterRequest", + "SwitchoverClusterRequest", "PromoteClusterRequest", "RestoreClusterRequest", "ListInstancesRequest", @@ -52,6 +55,9 @@ "FailoverInstanceRequest", "InjectFaultRequest", "RestartInstanceRequest", + "ExecuteSqlRequest", + "ExecuteSqlResponse", + "ExecuteSqlMetadata", "ListBackupsRequest", 
"ListBackupsResponse", "GetBackupRequest", @@ -64,6 +70,8 @@ "GenerateClientCertificateResponse", "GetConnectionInfoRequest", "OperationMetadata", + "PromoteClusterStatus", + "UpgradeClusterStatus", "ListUsersRequest", "ListUsersResponse", "GetUserRequest", @@ -196,17 +204,17 @@ class CreateSecondaryClusterRequest(proto.Message): request_id (str): Optional. An optional request ID to identify requests. Specify a unique request ID so that if - you must retry your request, the server will - know to ignore the request if it has already - been completed. The server will guarantee that - for at least 60 minutes since the first request. + you must retry your request, the server ignores + the request if it has already been completed. + The server guarantees that for at least 60 + minutes since the first request. For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request - ID, the server can check if original operation - with the same request ID was received, and if - so, will ignore the second request. This + ID, the server can check if the original + operation with the same request ID was received, + and if so, ignores the second request. This prevents clients from accidentally creating duplicate commitments. @@ -214,10 +222,10 @@ class CreateSecondaryClusterRequest(proto.Message): exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). validate_only (bool): - Optional. If set, performs request validation - (e.g. permission checks and any other type of - validation), but do not actually execute the - create request. + Optional. If set, performs request + validation, for example, permission checks and + any other type of validation, but does not + actually execute the create request. """ parent: str = proto.Field( @@ -258,17 +266,17 @@ class CreateClusterRequest(proto.Message): request_id (str): Optional. An optional request ID to identify requests. 
Specify a unique request ID so that if - you must retry your request, the server will - know to ignore the request if it has already - been completed. The server will guarantee that - for at least 60 minutes since the first request. + you must retry your request, the server ignores + the request if it has already been completed. + The server guarantees that for at least 60 + minutes since the first request. For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request - ID, the server can check if original operation - with the same request ID was received, and if - so, will ignore the second request. This + ID, the server can check if the original + operation with the same request ID was received, + and if so, ignores the second request. This prevents clients from accidentally creating duplicate commitments. @@ -276,10 +284,10 @@ class CreateClusterRequest(proto.Message): exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). validate_only (bool): - Optional. If set, performs request validation - (e.g. permission checks and any other type of - validation), but do not actually execute the - create request. + Optional. If set, performs request + validation, for example, permission checks and + any other type of validation, but does not + actually execute the create request. """ parent: str = proto.Field( @@ -321,17 +329,17 @@ class UpdateClusterRequest(proto.Message): request_id (str): Optional. An optional request ID to identify requests. Specify a unique request ID so that if - you must retry your request, the server will - know to ignore the request if it has already - been completed. The server will guarantee that - for at least 60 minutes since the first request. + you must retry your request, the server ignores + the request if it has already been completed. + The server guarantees that for at least 60 + minutes since the first request. 
For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request - ID, the server can check if original operation - with the same request ID was received, and if - so, will ignore the second request. This + ID, the server can check if the original + operation with the same request ID was received, + and if so, ignores the second request. This prevents clients from accidentally creating duplicate commitments. @@ -339,10 +347,10 @@ class UpdateClusterRequest(proto.Message): exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). validate_only (bool): - Optional. If set, performs request validation - (e.g. permission checks and any other type of - validation), but do not actually execute the - update request. + Optional. If set, performs request + validation, for example, permission checks and + any other type of validation, but does not + actually execute the create request. allow_missing (bool): Optional. If set to true, update succeeds even if cluster is not found. In that case, a new cluster is created and @@ -373,6 +381,284 @@ class UpdateClusterRequest(proto.Message): ) +class UpgradeClusterRequest(proto.Message): + r"""Upgrades a cluster. + + Attributes: + name (str): + Required. The resource name of the cluster. + version (google.cloud.alloydb_v1beta.types.DatabaseVersion): + Required. The version the cluster is going to + be upgraded to. + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server ignores + the request if it has already been completed. + The server guarantees that for at least 60 + minutes since the first request. + + For example, consider a situation where you make + an initial request and the request times out. 
If + you make the request again with the same request + ID, the server can check if the original + operation with the same request ID was received, + and if so, ignores the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + validate_only (bool): + Optional. If set, performs request + validation, for example, permission checks and + any other type of validation, but does not + actually execute the create request. + etag (str): + Optional. The current etag of the Cluster. + If an etag is provided and does not match the + current etag of the Cluster, upgrade will be + blocked and an ABORTED error will be returned. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + version: resources.DatabaseVersion = proto.Field( + proto.ENUM, + number=2, + enum=resources.DatabaseVersion, + ) + request_id: str = proto.Field( + proto.STRING, + number=3, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=4, + ) + etag: str = proto.Field( + proto.STRING, + number=5, + ) + + +class UpgradeClusterResponse(proto.Message): + r"""UpgradeClusterResponse contains the response for upgrade + cluster operation. + + Attributes: + status (google.cloud.alloydb_v1beta.types.UpgradeClusterResponse.Status): + Status of upgrade operation. + message (str): + A user friendly message summarising the + upgrade operation details and the next steps for + the user if there is any. + cluster_upgrade_details (MutableSequence[google.cloud.alloydb_v1beta.types.UpgradeClusterResponse.ClusterUpgradeDetails]): + Array of upgrade details for the current + cluster and all the secondary clusters + associated with this cluster. + """ + + class Status(proto.Enum): + r"""Status of upgrade operation. + + Values: + STATUS_UNSPECIFIED (0): + Unspecified status. + NOT_STARTED (4): + Not started. 
+ IN_PROGRESS (5): + In progress. + SUCCESS (1): + Operation succeeded. + FAILED (2): + Operation failed. + PARTIAL_SUCCESS (3): + Operation partially succeeded. + CANCEL_IN_PROGRESS (6): + Cancel is in progress. + CANCELLED (7): + Cancellation complete. + """ + STATUS_UNSPECIFIED = 0 + NOT_STARTED = 4 + IN_PROGRESS = 5 + SUCCESS = 1 + FAILED = 2 + PARTIAL_SUCCESS = 3 + CANCEL_IN_PROGRESS = 6 + CANCELLED = 7 + + class Stage(proto.Enum): + r"""Stage in the upgrade. + + Values: + STAGE_UNSPECIFIED (0): + Unspecified stage. + ALLOYDB_PRECHECK (1): + Pre-upgrade custom checks, not covered by pg_upgrade. + PG_UPGRADE_CHECK (2): + Pre-upgrade pg_upgrade checks. + PREPARE_FOR_UPGRADE (5): + Clone the original cluster. + PRIMARY_INSTANCE_UPGRADE (3): + Upgrade the primary instance(downtime). + READ_POOL_INSTANCES_UPGRADE (4): + This stage is read pool upgrade. + ROLLBACK (6): + Rollback in case of critical failures. + CLEANUP (7): + Cleanup. + """ + STAGE_UNSPECIFIED = 0 + ALLOYDB_PRECHECK = 1 + PG_UPGRADE_CHECK = 2 + PREPARE_FOR_UPGRADE = 5 + PRIMARY_INSTANCE_UPGRADE = 3 + READ_POOL_INSTANCES_UPGRADE = 4 + ROLLBACK = 6 + CLEANUP = 7 + + class StageInfo(proto.Message): + r"""Stage information for different stages in the upgrade + process. + + Attributes: + stage (google.cloud.alloydb_v1beta.types.UpgradeClusterResponse.Stage): + The stage. + status (google.cloud.alloydb_v1beta.types.UpgradeClusterResponse.Status): + Status of the stage. + logs_url (str): + logs_url is the URL for the logs associated with a stage if + that stage has logs. Right now, only three stages have logs: + ALLOYDB_PRECHECK, PG_UPGRADE_CHECK, + PRIMARY_INSTANCE_UPGRADE. 
+ """
+
+ stage: "UpgradeClusterResponse.Stage" = proto.Field(
+ proto.ENUM,
+ number=1,
+ enum="UpgradeClusterResponse.Stage",
+ )
+ status: "UpgradeClusterResponse.Status" = proto.Field(
+ proto.ENUM,
+ number=2,
+ enum="UpgradeClusterResponse.Status",
+ )
+ logs_url: str = proto.Field(
+ proto.STRING,
+ number=3,
+ )
+
+ class InstanceUpgradeDetails(proto.Message):
+ r"""Details regarding the upgrade of instances associated with a
+ cluster.
+
+ Attributes:
+ name (str):
+ Normalized name of the instance.
+ upgrade_status (google.cloud.alloydb_v1beta.types.UpgradeClusterResponse.Status):
+ Upgrade status of the instance.
+ instance_type (google.cloud.alloydb_v1beta.types.Instance.InstanceType):
+ Instance type.
+ """
+
+ name: str = proto.Field(
+ proto.STRING,
+ number=1,
+ )
+ upgrade_status: "UpgradeClusterResponse.Status" = proto.Field(
+ proto.ENUM,
+ number=2,
+ enum="UpgradeClusterResponse.Status",
+ )
+ instance_type: resources.Instance.InstanceType = proto.Field(
+ proto.ENUM,
+ number=3,
+ enum=resources.Instance.InstanceType,
+ )
+
+ class ClusterUpgradeDetails(proto.Message):
+ r"""Upgrade details of a cluster. This cluster can be primary or
+ secondary.
+
+ Attributes:
+ name (str):
+ Normalized name of the cluster
+ upgrade_status (google.cloud.alloydb_v1beta.types.UpgradeClusterResponse.Status):
+ Upgrade status of the cluster.
+ cluster_type (google.cloud.alloydb_v1beta.types.Cluster.ClusterType):
+ Cluster type which can either be primary or
+ secondary.
+ database_version (google.cloud.alloydb_v1beta.types.DatabaseVersion):
+ Database version of the cluster after the
+ upgrade operation. This will be the target
+ version if the upgrade was successful, otherwise
+ it remains the same as that before the upgrade
+ operation.
+ stage_info (MutableSequence[google.cloud.alloydb_v1beta.types.UpgradeClusterResponse.StageInfo]):
+ Array containing stage info associated with
+ this cluster. 
+ instance_upgrade_details (MutableSequence[google.cloud.alloydb_v1beta.types.UpgradeClusterResponse.InstanceUpgradeDetails]): + Upgrade details of the instances directly + associated with this cluster. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + upgrade_status: "UpgradeClusterResponse.Status" = proto.Field( + proto.ENUM, + number=2, + enum="UpgradeClusterResponse.Status", + ) + cluster_type: resources.Cluster.ClusterType = proto.Field( + proto.ENUM, + number=3, + enum=resources.Cluster.ClusterType, + ) + database_version: resources.DatabaseVersion = proto.Field( + proto.ENUM, + number=4, + enum=resources.DatabaseVersion, + ) + stage_info: MutableSequence[ + "UpgradeClusterResponse.StageInfo" + ] = proto.RepeatedField( + proto.MESSAGE, + number=5, + message="UpgradeClusterResponse.StageInfo", + ) + instance_upgrade_details: MutableSequence[ + "UpgradeClusterResponse.InstanceUpgradeDetails" + ] = proto.RepeatedField( + proto.MESSAGE, + number=6, + message="UpgradeClusterResponse.InstanceUpgradeDetails", + ) + + status: Status = proto.Field( + proto.ENUM, + number=1, + enum=Status, + ) + message: str = proto.Field( + proto.STRING, + number=2, + ) + cluster_upgrade_details: MutableSequence[ + ClusterUpgradeDetails + ] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message=ClusterUpgradeDetails, + ) + + class DeleteClusterRequest(proto.Message): r"""Message for deleting a Cluster @@ -384,17 +670,17 @@ class DeleteClusterRequest(proto.Message): request_id (str): Optional. An optional request ID to identify requests. Specify a unique request ID so that if - you must retry your request, the server will - know to ignore the request if it has already - been completed. The server will guarantee that - for at least 60 minutes after the first request. + you must retry your request, the server ignores + the request if it has already been completed. + The server guarantees that for at least 60 + minutes since the first request. 
For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request - ID, the server can check if original operation - with the same request ID was received, and if - so, will ignore the second request. This + ID, the server can check if the original + operation with the same request ID was received, + and if so, ignores the second request. This prevents clients from accidentally creating duplicate commitments. @@ -407,10 +693,10 @@ class DeleteClusterRequest(proto.Message): current etag of the Cluster, deletion will be blocked and an ABORTED error will be returned. validate_only (bool): - Optional. If set, performs request validation - (e.g. permission checks and any other type of - validation), but do not actually execute the - delete. + Optional. If set, performs request + validation, for example, permission checks and + any other type of validation, but does not + actually execute the create request. force (bool): Optional. Whether to cascade delete child instances for given cluster. @@ -438,6 +724,55 @@ class DeleteClusterRequest(proto.Message): ) +class SwitchoverClusterRequest(proto.Message): + r"""Message for switching over to a cluster + + Attributes: + name (str): + Required. The name of the resource. For the + required format, see the comment on the + Cluster.name field + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server ignores + the request if it has already been completed. + The server guarantees that for at least 60 + minutes since the first request. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if the original + operation with the same request ID was received, + and if so, ignores the second request. 
This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + validate_only (bool): + Optional. If set, performs request + validation, for example, permission checks and + any other type of validation, but does not + actually execute the create request. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + request_id: str = proto.Field( + proto.STRING, + number=2, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=3, + ) + + class PromoteClusterRequest(proto.Message): r"""Message for promoting a Cluster @@ -449,10 +784,10 @@ class PromoteClusterRequest(proto.Message): request_id (str): Optional. An optional request ID to identify requests. Specify a unique request ID so that if - you must retry your request, the server will - know to ignore the request if it has already - been completed. The server will guarantee that - for at least 60 minutes after the first request. + you must retry your request, the server ignores + the request if it has already been completed. + The server guarantees that for at least 60 + minutes since the first request. For example, consider a situation where you make an initial request and the request times out. If @@ -472,10 +807,10 @@ class PromoteClusterRequest(proto.Message): current etag of the Cluster, deletion will be blocked and an ABORTED error will be returned. validate_only (bool): - Optional. If set, performs request validation - (e.g. permission checks and any other type of - validation), but do not actually execute the - delete. + Optional. If set, performs request + validation, for example, permission checks and + any other type of validation, but does not + actually execute the create request. """ name: str = proto.Field( @@ -529,17 +864,17 @@ class RestoreClusterRequest(proto.Message): request_id (str): Optional. 
An optional request ID to identify requests. Specify a unique request ID so that if - you must retry your request, the server will - know to ignore the request if it has already - been completed. The server will guarantee that - for at least 60 minutes since the first request. + you must retry your request, the server ignores + the request if it has already been completed. + The server guarantees that for at least 60 + minutes since the first request. For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request - ID, the server can check if original operation - with the same request ID was received, and if - so, will ignore the second request. This + ID, the server can check if the original + operation with the same request ID was received, + and if so, ignores the second request. This prevents clients from accidentally creating duplicate commitments. @@ -547,10 +882,10 @@ class RestoreClusterRequest(proto.Message): exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). validate_only (bool): - Optional. If set, performs request validation - (e.g. permission checks and any other type of - validation), but do not actually execute the - import request. + Optional. If set, performs request + validation, for example, permission checks and + any other type of validation, but does not + actually execute the create request. """ backup_source: resources.BackupSource = proto.Field( @@ -706,17 +1041,17 @@ class CreateInstanceRequest(proto.Message): request_id (str): Optional. An optional request ID to identify requests. Specify a unique request ID so that if - you must retry your request, the server will - know to ignore the request if it has already - been completed. The server will guarantee that - for at least 60 minutes since the first request. + you must retry your request, the server ignores + the request if it has already been completed. 
+ The server guarantees that for at least 60 + minutes since the first request. For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request - ID, the server can check if original operation - with the same request ID was received, and if - so, will ignore the second request. This + ID, the server can check if the original + operation with the same request ID was received, + and if so, ignores the second request. This prevents clients from accidentally creating duplicate commitments. @@ -724,10 +1059,10 @@ class CreateInstanceRequest(proto.Message): exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). validate_only (bool): - Optional. If set, performs request validation - (e.g. permission checks and any other type of - validation), but do not actually execute the - create request. + Optional. If set, performs request + validation, for example, permission checks and + any other type of validation, but does not + actually execute the create request. """ parent: str = proto.Field( @@ -768,17 +1103,17 @@ class CreateSecondaryInstanceRequest(proto.Message): request_id (str): Optional. An optional request ID to identify requests. Specify a unique request ID so that if - you must retry your request, the server will - know to ignore the request if it has already - been completed. The server will guarantee that - for at least 60 minutes since the first request. + you must retry your request, the server ignores + the request if it has already been completed. + The server guarantees that for at least 60 + minutes since the first request. For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request - ID, the server can check if original operation - with the same request ID was received, and if - so, will ignore the second request. 
This + ID, the server can check if the original + operation with the same request ID was received, + and if so, ignores the second request. This prevents clients from accidentally creating duplicate commitments. @@ -786,10 +1121,10 @@ class CreateSecondaryInstanceRequest(proto.Message): exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). validate_only (bool): - Optional. If set, performs request validation - (e.g. permission checks and any other type of - validation), but do not actually execute the - create request. + Optional. If set, performs request + validation, for example, permission checks and + any other type of validation, but does not + actually execute the create request. """ parent: str = proto.Field( @@ -845,17 +1180,17 @@ class BatchCreateInstancesRequest(proto.Message): request_id (str): Optional. An optional request ID to identify requests. Specify a unique request ID so that if - you must retry your request, the server will - know to ignore the request if it has already - been completed. The server will guarantee that - for at least 60 minutes since the first request. + you must retry your request, the server ignores + the request if it has already been completed. + The server guarantees that for at least 60 + minutes since the first request. For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request - ID, the server can check if original operation - with the same request ID was received, and if - so, will ignore the second request. This + ID, the server can check if the original + operation with the same request ID was received, + and if so, ignores the second request. This prevents clients from accidentally creating duplicate commitments. @@ -983,7 +1318,7 @@ class State(proto.Enum): The state of the instance is unknown. 
PENDING_CREATE (1): Instance is pending creation and has not yet - been picked up for processsing in the backend. + been picked up for processing in the backend. READY (2): The instance is active and running. CREATING (3): @@ -1044,17 +1379,17 @@ class UpdateInstanceRequest(proto.Message): request_id (str): Optional. An optional request ID to identify requests. Specify a unique request ID so that if - you must retry your request, the server will - know to ignore the request if it has already - been completed. The server will guarantee that - for at least 60 minutes since the first request. + you must retry your request, the server ignores + the request if it has already been completed. + The server guarantees that for at least 60 + minutes since the first request. For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request - ID, the server can check if original operation - with the same request ID was received, and if - so, will ignore the second request. This + ID, the server can check if the original + operation with the same request ID was received, + and if so, ignores the second request. This prevents clients from accidentally creating duplicate commitments. @@ -1062,10 +1397,10 @@ class UpdateInstanceRequest(proto.Message): exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). validate_only (bool): - Optional. If set, performs request validation - (e.g. permission checks and any other type of - validation), but do not actually execute the - update request. + Optional. If set, performs request + validation, for example, permission checks and + any other type of validation, but does not + actually execute the create request. allow_missing (bool): Optional. If set to true, update succeeds even if instance is not found. 
In that case, a new instance is created and @@ -1107,17 +1442,17 @@ class DeleteInstanceRequest(proto.Message): request_id (str): Optional. An optional request ID to identify requests. Specify a unique request ID so that if - you must retry your request, the server will - know to ignore the request if it has already - been completed. The server will guarantee that - for at least 60 minutes after the first request. + you must retry your request, the server ignores + the request if it has already been completed. + The server guarantees that for at least 60 + minutes since the first request. For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request - ID, the server can check if original operation - with the same request ID was received, and if - so, will ignore the second request. This + ID, the server can check if the original + operation with the same request ID was received, + and if so, ignores the second request. This prevents clients from accidentally creating duplicate commitments. @@ -1130,10 +1465,10 @@ class DeleteInstanceRequest(proto.Message): current etag of the Instance, deletion will be blocked and an ABORTED error will be returned. validate_only (bool): - Optional. If set, performs request validation - (e.g. permission checks and any other type of - validation), but do not actually execute the - delete. + Optional. If set, performs request + validation, for example, permission checks and + any other type of validation, but does not + actually execute the create request. """ name: str = proto.Field( @@ -1165,17 +1500,17 @@ class FailoverInstanceRequest(proto.Message): request_id (str): Optional. An optional request ID to identify requests. Specify a unique request ID so that if - you must retry your request, the server will - know to ignore the request if it has already - been completed. 
The server will guarantee that - for at least 60 minutes after the first request. + you must retry your request, the server ignores + the request if it has already been completed. + The server guarantees that for at least 60 + minutes since the first request. For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request - ID, the server can check if original operation - with the same request ID was received, and if - so, will ignore the second request. This + ID, the server can check if the original + operation with the same request ID was received, + and if so, ignores the second request. This prevents clients from accidentally creating duplicate commitments. @@ -1183,10 +1518,10 @@ class FailoverInstanceRequest(proto.Message): exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). validate_only (bool): - Optional. If set, performs request validation - (e.g. permission checks and any other type of - validation), but do not actually execute the - failover. + Optional. If set, performs request + validation, for example, permission checks and + any other type of validation, but does not + actually execute the create request. """ name: str = proto.Field( @@ -1217,17 +1552,17 @@ class InjectFaultRequest(proto.Message): request_id (str): Optional. An optional request ID to identify requests. Specify a unique request ID so that if - you must retry your request, the server will - know to ignore the request if it has already - been completed. The server will guarantee that - for at least 60 minutes after the first request. + you must retry your request, the server ignores + the request if it has already been completed. + The server guarantees that for at least 60 + minutes since the first request. For example, consider a situation where you make an initial request and the request times out. 
If you make the request again with the same request - ID, the server can check if original operation - with the same request ID was received, and if - so, will ignore the second request. This + ID, the server can check if the original + operation with the same request ID was received, + and if so, ignores the second request. This prevents clients from accidentally creating duplicate commitments. @@ -1235,10 +1570,10 @@ class InjectFaultRequest(proto.Message): exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). validate_only (bool): - Optional. If set, performs request validation - (e.g. permission checks and any other type of - validation), but do not actually execute the - fault injection. + Optional. If set, performs request + validation, for example, permission checks and + any other type of validation, but does not + actually execute the create request. """ class FaultType(proto.Enum): @@ -1284,17 +1619,17 @@ class RestartInstanceRequest(proto.Message): request_id (str): Optional. An optional request ID to identify requests. Specify a unique request ID so that if - you must retry your request, the server will - know to ignore the request if it has already - been completed. The server will guarantee that - for at least 60 minutes after the first request. + you must retry your request, the server ignores + the request if it has already been completed. + The server guarantees that for at least 60 + minutes since the first request. For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request - ID, the server can check if original operation - with the same request ID was received, and if - so, will ignore the second request. This + ID, the server can check if the original + operation with the same request ID was received, + and if so, ignores the second request. This prevents clients from accidentally creating duplicate commitments. 
@@ -1302,10 +1637,14 @@ class RestartInstanceRequest(proto.Message): exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). validate_only (bool): - Optional. If set, performs request validation - (e.g. permission checks and any other type of - validation), but do not actually execute the - restart. + Optional. If set, performs request + validation, for example, permission checks and + any other type of validation, but does not + actually execute the create request. + node_ids (MutableSequence[str]): + Optional. Full name of the nodes as obtained from + INSTANCE_VIEW_FULL to restart upon. Applicable only to read + instances. """ name: str = proto.Field( @@ -1320,6 +1659,162 @@ class RestartInstanceRequest(proto.Message): proto.BOOL, number=3, ) + node_ids: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=4, + ) + + +class ExecuteSqlRequest(proto.Message): + r"""Request for ExecuteSql rpc. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + password (str): + Optional. The database native user’s + password. + + This field is a member of `oneof`_ ``user_credential``. + instance (str): + Required. The instance where the SQL will be + executed. For the required format, see the + comment on the Instance.name field. + database (str): + Required. Name of the database where the query will be + executed. Note - Value provided should be the same as + expected from ``SELECT current_database();`` and NOT as a + resource reference. + user (str): + Required. Database user to be used for executing the SQL. + Note - Value provided should be the same as expected from + ``SELECT current_user;`` and NOT as a resource reference. + sql_statement (str): + Required. SQL statement to execute on + database. Any valid statement is permitted, + including DDL, DML, DQL statements. 
+ """ + + password: str = proto.Field( + proto.STRING, + number=5, + oneof="user_credential", + ) + instance: str = proto.Field( + proto.STRING, + number=1, + ) + database: str = proto.Field( + proto.STRING, + number=2, + ) + user: str = proto.Field( + proto.STRING, + number=3, + ) + sql_statement: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ExecuteSqlResponse(proto.Message): + r"""Execute a SQL statement response. + + Attributes: + sql_results (MutableSequence[google.cloud.alloydb_v1beta.types.SqlResult]): + SqlResult represents the results for the + execution of sql statements. + metadata (google.cloud.alloydb_v1beta.types.ExecuteSqlMetadata): + Any additional metadata information regarding + the execution of the sql statement. + """ + + sql_results: MutableSequence[data_model.SqlResult] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=data_model.SqlResult, + ) + metadata: "ExecuteSqlMetadata" = proto.Field( + proto.MESSAGE, + number=3, + message="ExecuteSqlMetadata", + ) + + +class ExecuteSqlMetadata(proto.Message): + r"""Any additional metadata information regarding the execution + of the SQL + + Attributes: + message (str): + Message related to SQL execution. Marked as + core content since it can potentially contain + details related to the query or result set. This + field can be used to convey messages such as + "when the SQL result set exceeds the acceptable + response size limits.". + partial_result (bool): + Set to true if SQL returned a result set + larger than the acceptable response size limits + and the result was truncated. + sql_statement_execution_duration (google.protobuf.duration_pb2.Duration): + The time duration taken to execute the sql + statement. + status (google.cloud.alloydb_v1beta.types.ExecuteSqlMetadata.Status): + Status of SQL execution. + """ + + class Status(proto.Enum): + r"""Status contains all valid Status a SQL execution can end up + in. + + Values: + STATUS_UNSPECIFIED (0): + The status is unknown. 
+ OK (1):
+ No error during SQL execution i.e. All SQL
+ statements ran to completion. The "message" will
+ be empty.
+ PARTIAL (2):
+ Same as OK, except indicates that only
+ partial results were returned. The "message"
+ field will contain details on why results were
+ truncated.
+ ERROR (3):
+ Error during SQL execution. At least 1 SQL
+ statement execution resulted in an error. Side
+ effects of other statements are rolled back.
+ The "message" field will contain human readable
+ error given by Postgres of the first bad SQL
+ statement. SQL execution errors don't constitute
+ API errors as defined in
+ https://google.aip.dev/193 but will be returned
+ as part of this message.
+ """
+ STATUS_UNSPECIFIED = 0
+ OK = 1
+ PARTIAL = 2
+ ERROR = 3
+
+ message: str = proto.Field(
+ proto.STRING,
+ number=1,
+ )
+ partial_result: bool = proto.Field(
+ proto.BOOL,
+ number=2,
+ )
+ sql_statement_execution_duration: duration_pb2.Duration = proto.Field(
+ proto.MESSAGE,
+ number=3,
+ message=duration_pb2.Duration,
+ )
+ status: Status = proto.Field(
+ proto.ENUM,
+ number=4,
+ enum=Status,
+ )


class ListBackupsRequest(proto.Message):
@@ -1422,17 +1917,17 @@ class CreateBackupRequest(proto.Message):
request_id (str):
Optional. An optional request ID to identify
requests. Specify a unique request ID so that if
- you must retry your request, the server will
- know to ignore the request if it has already
- been completed. The server will guarantee that
- for at least 60 minutes since the first request.
+ you must retry your request, the server ignores
+ the request if it has already been completed.
+ The server guarantees that for at least 60
+ minutes since the first request.

For example, consider a situation where you make
an initial request and the request times out. If
you make the request again with the same request
- ID, the server can check if original operation
- with the same request ID was received, and if
- so, will ignore the second request. 
This + ID, the server can check if the original + operation with the same request ID was received, + and if so, ignores the second request. This prevents clients from accidentally creating duplicate commitments. @@ -1483,17 +1978,17 @@ class UpdateBackupRequest(proto.Message): request_id (str): Optional. An optional request ID to identify requests. Specify a unique request ID so that if - you must retry your request, the server will - know to ignore the request if it has already - been completed. The server will guarantee that - for at least 60 minutes since the first request. + you must retry your request, the server ignores + the request if it has already been completed. + The server guarantees that for at least 60 + minutes since the first request. For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request - ID, the server can check if original operation - with the same request ID was received, and if - so, will ignore the second request. This + ID, the server can check if the original + operation with the same request ID was received, + and if so, ignores the second request. This prevents clients from accidentally creating duplicate commitments. @@ -1544,17 +2039,17 @@ class DeleteBackupRequest(proto.Message): request_id (str): Optional. An optional request ID to identify requests. Specify a unique request ID so that if - you must retry your request, the server will - know to ignore the request if it has already - been completed. The server will guarantee that - for at least 60 minutes after the first request. + you must retry your request, the server ignores + the request if it has already been completed. + The server guarantees that for at least 60 + minutes since the first request. For example, consider a situation where you make an initial request and the request times out. 
If you make the request again with the same request - ID, the server can check if original operation - with the same request ID was received, and if - so, will ignore the second request. This + ID, the server can check if the original + operation with the same request ID was received, + and if so, ignores the second request. This prevents clients from accidentally creating duplicate commitments. @@ -1668,17 +2163,17 @@ class GenerateClientCertificateRequest(proto.Message): request_id (str): Optional. An optional request ID to identify requests. Specify a unique request ID so that if - you must retry your request, the server will - know to ignore the request if it has already - been completed. The server will guarantee that - for at least 60 minutes after the first request. + you must retry your request, the server ignores + the request if it has already been completed. + The server guarantees that for at least 60 + minutes since the first request. For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request - ID, the server can check if original operation - with the same request ID was received, and if - so, will ignore the second request. This + ID, the server can check if the original + operation with the same request ID was received, + and if so, ignores the second request. This prevents clients from accidentally creating duplicate commitments. @@ -1774,17 +2269,17 @@ class GetConnectionInfoRequest(proto.Message): request_id (str): Optional. An optional request ID to identify requests. Specify a unique request ID so that if - you must retry your request, the server will - know to ignore the request if it has already - been completed. The server will guarantee that - for at least 60 minutes after the first request. + you must retry your request, the server ignores + the request if it has already been completed. 
+ The server guarantees that for at least 60 + minutes since the first request. For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request - ID, the server can check if original operation - with the same request ID was received, and if - so, will ignore the second request. This + ID, the server can check if the original + operation with the same request ID was received, + and if so, ignores the second request. This prevents clients from accidentally creating duplicate commitments. @@ -1806,6 +2301,11 @@ class GetConnectionInfoRequest(proto.Message): class OperationMetadata(proto.Message): r"""Represents the metadata of the long-running operation. + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields Attributes: @@ -1813,6 +2313,16 @@ class OperationMetadata(proto.Message): Output only. BatchCreateInstances related metadata. + This field is a member of `oneof`_ ``request_specific``. + promote_cluster_status (google.cloud.alloydb_v1beta.types.PromoteClusterStatus): + Output only. PromoteClusterStatus related + metadata. + + This field is a member of `oneof`_ ``request_specific``. + upgrade_cluster_status (google.cloud.alloydb_v1beta.types.UpgradeClusterStatus): + Output only. UpgradeClusterStatus related + metadata. + This field is a member of `oneof`_ ``request_specific``. create_time (google.protobuf.timestamp_pb2.Timestamp): Output only. 
The time the operation was @@ -1846,6 +2356,18 @@ class OperationMetadata(proto.Message): oneof="request_specific", message="BatchCreateInstancesMetadata", ) + promote_cluster_status: "PromoteClusterStatus" = proto.Field( + proto.MESSAGE, + number=9, + oneof="request_specific", + message="PromoteClusterStatus", + ) + upgrade_cluster_status: "UpgradeClusterStatus" = proto.Field( + proto.MESSAGE, + number=10, + oneof="request_specific", + message="UpgradeClusterStatus", + ) create_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=1, @@ -1878,6 +2400,178 @@ class OperationMetadata(proto.Message): ) +class PromoteClusterStatus(proto.Message): + r"""Message for current status of the database during Promote + Cluster operation. + + Attributes: + state (google.cloud.alloydb_v1beta.types.PromoteClusterStatus.State): + Output only. The current state of the promoted secondary's + database. Once the operation is complete, the final state of + the database in the LRO can be one of: + + 1. PROMOTE_CLUSTER_AVAILABLE_FOR_READ, indicating that the + promote operation has failed mid-way, the database is + still only available for read. + 2. PROMOTE_CLUSTER_COMPLETED, indicating that a promote + operation completed successfully. The database is + available for both read and write requests + """ + + class State(proto.Enum): + r"""State contains all valid states of the database during + promote cluster operation. This is used for status reporting + through the LRO metadata. + + Values: + STATE_UNSPECIFIED (0): + The state of the database is unknown. + PROMOTE_CLUSTER_AVAILABLE_FOR_READ (1): + The database is only available for read. + PROMOTE_CLUSTER_AVAILABLE_FOR_WRITE (2): + The database is available for both read and + write. The promote operation is near completion. + PROMOTE_CLUSTER_COMPLETED (3): + The promote operation is completed and the + database is available for write. 
+ """ + STATE_UNSPECIFIED = 0 + PROMOTE_CLUSTER_AVAILABLE_FOR_READ = 1 + PROMOTE_CLUSTER_AVAILABLE_FOR_WRITE = 2 + PROMOTE_CLUSTER_COMPLETED = 3 + + state: State = proto.Field( + proto.ENUM, + number=1, + enum=State, + ) + + +class UpgradeClusterStatus(proto.Message): + r"""Message for current status of the Major Version Upgrade + operation. + + Attributes: + state (google.cloud.alloydb_v1beta.types.UpgradeClusterResponse.Status): + Cluster Major Version Upgrade state. + cancellable (bool): + Whether the operation is cancellable. + source_version (google.cloud.alloydb_v1beta.types.DatabaseVersion): + Source database major version. + target_version (google.cloud.alloydb_v1beta.types.DatabaseVersion): + Target database major version. + stages (MutableSequence[google.cloud.alloydb_v1beta.types.UpgradeClusterStatus.StageStatus]): + Status of all upgrade stages. + """ + + class StageStatus(proto.Message): + r"""Status of an upgrade stage. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + read_pool_instances_upgrade (google.cloud.alloydb_v1beta.types.UpgradeClusterStatus.ReadPoolInstancesUpgradeStageStatus): + Read pool instances upgrade metadata. + + This field is a member of `oneof`_ ``stage_specific_status``. + stage (google.cloud.alloydb_v1beta.types.UpgradeClusterResponse.Stage): + Upgrade stage. + state (google.cloud.alloydb_v1beta.types.UpgradeClusterResponse.Status): + State of this stage. 
+ """ + + read_pool_instances_upgrade: "UpgradeClusterStatus.ReadPoolInstancesUpgradeStageStatus" = proto.Field( + proto.MESSAGE, + number=11, + oneof="stage_specific_status", + message="UpgradeClusterStatus.ReadPoolInstancesUpgradeStageStatus", + ) + stage: "UpgradeClusterResponse.Stage" = proto.Field( + proto.ENUM, + number=1, + enum="UpgradeClusterResponse.Stage", + ) + state: "UpgradeClusterResponse.Status" = proto.Field( + proto.ENUM, + number=2, + enum="UpgradeClusterResponse.Status", + ) + + class ReadPoolInstancesUpgradeStageStatus(proto.Message): + r"""Read pool instances upgrade specific status. + + Attributes: + upgrade_stats (google.cloud.alloydb_v1beta.types.UpgradeClusterStatus.ReadPoolInstancesUpgradeStageStatus.Stats): + Read pool instances upgrade statistics. + """ + + class Stats(proto.Message): + r"""Upgrade stats for read pool instances. + + Attributes: + not_started (int): + Number of read pool instances for which + upgrade has not started. + ongoing (int): + Number of read pool instances undergoing + upgrade. + success (int): + Number of read pool instances successfully + upgraded. + failed (int): + Number of read pool instances which failed to + upgrade. 
+ """ + + not_started: int = proto.Field( + proto.INT32, + number=1, + ) + ongoing: int = proto.Field( + proto.INT32, + number=2, + ) + success: int = proto.Field( + proto.INT32, + number=3, + ) + failed: int = proto.Field( + proto.INT32, + number=4, + ) + + upgrade_stats: "UpgradeClusterStatus.ReadPoolInstancesUpgradeStageStatus.Stats" = proto.Field( + proto.MESSAGE, + number=1, + message="UpgradeClusterStatus.ReadPoolInstancesUpgradeStageStatus.Stats", + ) + + state: "UpgradeClusterResponse.Status" = proto.Field( + proto.ENUM, + number=1, + enum="UpgradeClusterResponse.Status", + ) + cancellable: bool = proto.Field( + proto.BOOL, + number=2, + ) + source_version: resources.DatabaseVersion = proto.Field( + proto.ENUM, + number=3, + enum=resources.DatabaseVersion, + ) + target_version: resources.DatabaseVersion = proto.Field( + proto.ENUM, + number=4, + enum=resources.DatabaseVersion, + ) + stages: MutableSequence[StageStatus] = proto.RepeatedField( + proto.MESSAGE, + number=5, + message=StageStatus, + ) + + class ListUsersRequest(proto.Message): r"""Message for requesting list of Users @@ -1981,17 +2675,17 @@ class CreateUserRequest(proto.Message): request_id (str): Optional. An optional request ID to identify requests. Specify a unique request ID so that if - you must retry your request, the server will - know to ignore the request if it has already - been completed. The server will guarantee that - for at least 60 minutes since the first request. + you must retry your request, the server ignores + the request if it has already been completed. + The server guarantees that for at least 60 + minutes since the first request. For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request - ID, the server can check if original operation - with the same request ID was received, and if - so, will ignore the second request. 
This + ID, the server can check if the original + operation with the same request ID was received, + and if so, ignores the second request. This prevents clients from accidentally creating duplicate commitments. @@ -2042,17 +2736,17 @@ class UpdateUserRequest(proto.Message): request_id (str): Optional. An optional request ID to identify requests. Specify a unique request ID so that if - you must retry your request, the server will - know to ignore the request if it has already - been completed. The server will guarantee that - for at least 60 minutes since the first request. + you must retry your request, the server ignores + the request if it has already been completed. + The server guarantees that for at least 60 + minutes since the first request. For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request - ID, the server can check if original operation - with the same request ID was received, and if - so, will ignore the second request. This + ID, the server can check if the original + operation with the same request ID was received, + and if so, ignores the second request. This prevents clients from accidentally creating duplicate commitments. @@ -2102,17 +2796,17 @@ class DeleteUserRequest(proto.Message): request_id (str): Optional. An optional request ID to identify requests. Specify a unique request ID so that if - you must retry your request, the server will - know to ignore the request if it has already - been completed. The server will guarantee that - for at least 60 minutes after the first request. + you must retry your request, the server ignores + the request if it has already been completed. + The server guarantees that for at least 60 + minutes since the first request. For example, consider a situation where you make an initial request and the request times out. 
If you make the request again with the same request - ID, the server can check if original operation - with the same request ID was received, and if - so, will ignore the second request. This + ID, the server can check if the original + operation with the same request ID was received, + and if so, ignores the second request. This prevents clients from accidentally creating duplicate commitments. @@ -2146,12 +2840,10 @@ class ListDatabasesRequest(proto.Message): Required. Parent value for ListDatabasesRequest. page_size (int): - Optional. The maximum number of databases to - return. The service may return fewer than this - value. If unspecified, an appropriate number of - databases will be returned. The max value will - be 2000, values above max will be coerced to - max. + Optional. The maximum number of databases to return. The + service may return fewer than this value. If unspecified, + 2000 is the default page_size. The max value of page_size + will be 4000, values above max will be coerced to max. page_token (str): Optional. A page token, received from a previous ``ListDatabases`` call. This should be provided to retrieve diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_execute_sql_async.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_execute_sql_async.py new file mode 100644 index 000000000000..8d84781c90e1 --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_execute_sql_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ExecuteSql +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1alpha_generated_AlloyDBAdmin_ExecuteSql_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1alpha + + +async def sample_execute_sql(): + # Create a client + client = alloydb_v1alpha.AlloyDBAdminAsyncClient() + + # Initialize request argument(s) + request = alloydb_v1alpha.ExecuteSqlRequest( + password="password_value", + instance="instance_value", + database="database_value", + user="user_value", + sql_statement="sql_statement_value", + ) + + # Make the request + response = await client.execute_sql(request=request) + + # Handle the response + print(response) + +# [END alloydb_v1alpha_generated_AlloyDBAdmin_ExecuteSql_async] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_execute_sql_sync.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_execute_sql_sync.py new file mode 100644 index 000000000000..f2ca4446953e --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_execute_sql_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for ExecuteSql +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1alpha_generated_AlloyDBAdmin_ExecuteSql_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1alpha + + +def sample_execute_sql(): + # Create a client + client = alloydb_v1alpha.AlloyDBAdminClient() + + # Initialize request argument(s) + request = alloydb_v1alpha.ExecuteSqlRequest( + password="password_value", + instance="instance_value", + database="database_value", + user="user_value", + sql_statement="sql_statement_value", + ) + + # Make the request + response = client.execute_sql(request=request) + + # Handle the response + print(response) + +# [END alloydb_v1alpha_generated_AlloyDBAdmin_ExecuteSql_sync] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_switchover_cluster_async.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_switchover_cluster_async.py new file mode 100644 index 000000000000..460e16331cc5 --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_switchover_cluster_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance 
with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SwitchoverCluster +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1alpha_generated_AlloyDBAdmin_SwitchoverCluster_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1alpha + + +async def sample_switchover_cluster(): + # Create a client + client = alloydb_v1alpha.AlloyDBAdminAsyncClient() + + # Initialize request argument(s) + request = alloydb_v1alpha.SwitchoverClusterRequest( + name="name_value", + ) + + # Make the request + operation = client.switchover_cluster(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END alloydb_v1alpha_generated_AlloyDBAdmin_SwitchoverCluster_async] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_switchover_cluster_sync.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_switchover_cluster_sync.py new file mode 100644 index 000000000000..0e40e10cb6d4 --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_switchover_cluster_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for SwitchoverCluster +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1alpha_generated_AlloyDBAdmin_SwitchoverCluster_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1alpha + + +def sample_switchover_cluster(): + # Create a client + client = alloydb_v1alpha.AlloyDBAdminClient() + + # Initialize request argument(s) + request = alloydb_v1alpha.SwitchoverClusterRequest( + name="name_value", + ) + + # Make the request + operation = client.switchover_cluster(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END alloydb_v1alpha_generated_AlloyDBAdmin_SwitchoverCluster_sync] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_upgrade_cluster_async.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_upgrade_cluster_async.py new file mode 100644 index 000000000000..7f498e5a8488 --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_upgrade_cluster_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the 
License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpgradeCluster +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1alpha_generated_AlloyDBAdmin_UpgradeCluster_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1alpha + + +async def sample_upgrade_cluster(): + # Create a client + client = alloydb_v1alpha.AlloyDBAdminAsyncClient() + + # Initialize request argument(s) + request = alloydb_v1alpha.UpgradeClusterRequest( + name="name_value", + version="POSTGRES_16", + ) + + # Make the request + operation = client.upgrade_cluster(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END alloydb_v1alpha_generated_AlloyDBAdmin_UpgradeCluster_async] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_upgrade_cluster_sync.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_upgrade_cluster_sync.py new file mode 100644 index 000000000000..023383ed6870 --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_upgrade_cluster_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for UpgradeCluster +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1alpha_generated_AlloyDBAdmin_UpgradeCluster_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1alpha + + +def sample_upgrade_cluster(): + # Create a client + client = alloydb_v1alpha.AlloyDBAdminClient() + + # Initialize request argument(s) + request = alloydb_v1alpha.UpgradeClusterRequest( + name="name_value", + version="POSTGRES_16", + ) + + # Make the request + operation = client.upgrade_cluster(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END alloydb_v1alpha_generated_AlloyDBAdmin_UpgradeCluster_sync] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_execute_sql_async.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_execute_sql_async.py new file mode 100644 index 000000000000..44835bc0f129 --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_execute_sql_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ExecuteSql +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1beta_generated_AlloyDBAdmin_ExecuteSql_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1beta + + +async def sample_execute_sql(): + # Create a client + client = alloydb_v1beta.AlloyDBAdminAsyncClient() + + # Initialize request argument(s) + request = alloydb_v1beta.ExecuteSqlRequest( + password="password_value", + instance="instance_value", + database="database_value", + user="user_value", + sql_statement="sql_statement_value", + ) + + # Make the request + response = await client.execute_sql(request=request) + + # Handle the response + print(response) + +# [END alloydb_v1beta_generated_AlloyDBAdmin_ExecuteSql_async] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_execute_sql_sync.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_execute_sql_sync.py new file mode 100644 index 000000000000..310459ef55b1 --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_execute_sql_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for ExecuteSql +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1beta_generated_AlloyDBAdmin_ExecuteSql_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1beta + + +def sample_execute_sql(): + # Create a client + client = alloydb_v1beta.AlloyDBAdminClient() + + # Initialize request argument(s) + request = alloydb_v1beta.ExecuteSqlRequest( + password="password_value", + instance="instance_value", + database="database_value", + user="user_value", + sql_statement="sql_statement_value", + ) + + # Make the request + response = client.execute_sql(request=request) + + # Handle the response + print(response) + +# [END alloydb_v1beta_generated_AlloyDBAdmin_ExecuteSql_sync] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_switchover_cluster_async.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_switchover_cluster_async.py new file mode 100644 index 000000000000..75e23a27f9e2 --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_switchover_cluster_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the 
License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SwitchoverCluster +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1beta_generated_AlloyDBAdmin_SwitchoverCluster_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1beta + + +async def sample_switchover_cluster(): + # Create a client + client = alloydb_v1beta.AlloyDBAdminAsyncClient() + + # Initialize request argument(s) + request = alloydb_v1beta.SwitchoverClusterRequest( + name="name_value", + ) + + # Make the request + operation = client.switchover_cluster(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END alloydb_v1beta_generated_AlloyDBAdmin_SwitchoverCluster_async] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_switchover_cluster_sync.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_switchover_cluster_sync.py new file mode 100644 index 000000000000..df282a459110 --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_switchover_cluster_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for SwitchoverCluster +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1beta_generated_AlloyDBAdmin_SwitchoverCluster_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1beta + + +def sample_switchover_cluster(): + # Create a client + client = alloydb_v1beta.AlloyDBAdminClient() + + # Initialize request argument(s) + request = alloydb_v1beta.SwitchoverClusterRequest( + name="name_value", + ) + + # Make the request + operation = client.switchover_cluster(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END alloydb_v1beta_generated_AlloyDBAdmin_SwitchoverCluster_sync] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_upgrade_cluster_async.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_upgrade_cluster_async.py new file mode 100644 index 000000000000..6804e370610e --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_upgrade_cluster_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpgradeCluster +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1beta_generated_AlloyDBAdmin_UpgradeCluster_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1beta + + +async def sample_upgrade_cluster(): + # Create a client + client = alloydb_v1beta.AlloyDBAdminAsyncClient() + + # Initialize request argument(s) + request = alloydb_v1beta.UpgradeClusterRequest( + name="name_value", + version="POSTGRES_16", + ) + + # Make the request + operation = client.upgrade_cluster(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END alloydb_v1beta_generated_AlloyDBAdmin_UpgradeCluster_async] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_upgrade_cluster_sync.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_upgrade_cluster_sync.py new file mode 100644 index 000000000000..0913b1acaf28 --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_upgrade_cluster_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for UpgradeCluster +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1beta_generated_AlloyDBAdmin_UpgradeCluster_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1beta + + +def sample_upgrade_cluster(): + # Create a client + client = alloydb_v1beta.AlloyDBAdminClient() + + # Initialize request argument(s) + request = alloydb_v1beta.UpgradeClusterRequest( + name="name_value", + version="POSTGRES_16", + ) + + # Make the request + operation = client.upgrade_cluster(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END alloydb_v1beta_generated_AlloyDBAdmin_UpgradeCluster_sync] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/snippet_metadata_google.cloud.alloydb.v1.json b/packages/google-cloud-alloydb/samples/generated_samples/snippet_metadata_google.cloud.alloydb.v1.json index 97f6c465c6c3..ba0a9bd4cff5 100644 --- a/packages/google-cloud-alloydb/samples/generated_samples/snippet_metadata_google.cloud.alloydb.v1.json +++ b/packages/google-cloud-alloydb/samples/generated_samples/snippet_metadata_google.cloud.alloydb.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-alloydb", - "version": "0.3.15" + "version": "0.4.0" }, "snippets": [ { diff --git 
a/packages/google-cloud-alloydb/samples/generated_samples/snippet_metadata_google.cloud.alloydb.v1alpha.json b/packages/google-cloud-alloydb/samples/generated_samples/snippet_metadata_google.cloud.alloydb.v1alpha.json index f72c51c8e224..33d4ab89d6f1 100644 --- a/packages/google-cloud-alloydb/samples/generated_samples/snippet_metadata_google.cloud.alloydb.v1alpha.json +++ b/packages/google-cloud-alloydb/samples/generated_samples/snippet_metadata_google.cloud.alloydb.v1alpha.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-alloydb", - "version": "0.3.15" + "version": "0.4.0" }, "snippets": [ { @@ -1864,6 +1864,199 @@ ], "title": "alloydb_v1alpha_generated_alloy_db_admin_delete_user_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.alloydb_v1alpha.AlloyDBAdminAsyncClient", + "shortName": "AlloyDBAdminAsyncClient" + }, + "fullName": "google.cloud.alloydb_v1alpha.AlloyDBAdminAsyncClient.execute_sql", + "method": { + "fullName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin.ExecuteSql", + "service": { + "fullName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "ExecuteSql" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1alpha.types.ExecuteSqlRequest" + }, + { + "name": "instance", + "type": "str" + }, + { + "name": "database", + "type": "str" + }, + { + "name": "user", + "type": "str" + }, + { + "name": "sql_statement", + "type": "str" + }, + { + "name": "password", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.alloydb_v1alpha.types.ExecuteSqlResponse", + "shortName": "execute_sql" + }, + "description": "Sample for ExecuteSql", + "file": "alloydb_v1alpha_generated_alloy_db_admin_execute_sql_async.py", + "language": 
"PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1alpha_generated_AlloyDBAdmin_ExecuteSql_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1alpha_generated_alloy_db_admin_execute_sql_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.alloydb_v1alpha.AlloyDBAdminClient", + "shortName": "AlloyDBAdminClient" + }, + "fullName": "google.cloud.alloydb_v1alpha.AlloyDBAdminClient.execute_sql", + "method": { + "fullName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin.ExecuteSql", + "service": { + "fullName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "ExecuteSql" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1alpha.types.ExecuteSqlRequest" + }, + { + "name": "instance", + "type": "str" + }, + { + "name": "database", + "type": "str" + }, + { + "name": "user", + "type": "str" + }, + { + "name": "sql_statement", + "type": "str" + }, + { + "name": "password", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.alloydb_v1alpha.types.ExecuteSqlResponse", + "shortName": "execute_sql" + }, + "description": "Sample for ExecuteSql", + "file": "alloydb_v1alpha_generated_alloy_db_admin_execute_sql_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1alpha_generated_AlloyDBAdmin_ExecuteSql_sync", + "segments": [ + { + "end": 55, + "start": 
27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1alpha_generated_alloy_db_admin_execute_sql_sync.py" + }, { "canonical": true, "clientMethod": { @@ -4601,6 +4794,167 @@ ], "title": "alloydb_v1alpha_generated_alloy_db_admin_restore_cluster_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.alloydb_v1alpha.AlloyDBAdminAsyncClient", + "shortName": "AlloyDBAdminAsyncClient" + }, + "fullName": "google.cloud.alloydb_v1alpha.AlloyDBAdminAsyncClient.switchover_cluster", + "method": { + "fullName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin.SwitchoverCluster", + "service": { + "fullName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "SwitchoverCluster" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1alpha.types.SwitchoverClusterRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "switchover_cluster" + }, + "description": "Sample for SwitchoverCluster", + "file": "alloydb_v1alpha_generated_alloy_db_admin_switchover_cluster_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1alpha_generated_AlloyDBAdmin_SwitchoverCluster_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": 
"CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1alpha_generated_alloy_db_admin_switchover_cluster_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.alloydb_v1alpha.AlloyDBAdminClient", + "shortName": "AlloyDBAdminClient" + }, + "fullName": "google.cloud.alloydb_v1alpha.AlloyDBAdminClient.switchover_cluster", + "method": { + "fullName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin.SwitchoverCluster", + "service": { + "fullName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "SwitchoverCluster" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1alpha.types.SwitchoverClusterRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "switchover_cluster" + }, + "description": "Sample for SwitchoverCluster", + "file": "alloydb_v1alpha_generated_alloy_db_admin_switchover_cluster_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1alpha_generated_AlloyDBAdmin_SwitchoverCluster_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"alloydb_v1alpha_generated_alloy_db_admin_switchover_cluster_sync.py" + }, { "canonical": true, "clientMethod": { @@ -5276,6 +5630,175 @@ } ], "title": "alloydb_v1alpha_generated_alloy_db_admin_update_user_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.alloydb_v1alpha.AlloyDBAdminAsyncClient", + "shortName": "AlloyDBAdminAsyncClient" + }, + "fullName": "google.cloud.alloydb_v1alpha.AlloyDBAdminAsyncClient.upgrade_cluster", + "method": { + "fullName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin.UpgradeCluster", + "service": { + "fullName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "UpgradeCluster" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1alpha.types.UpgradeClusterRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "version", + "type": "google.cloud.alloydb_v1alpha.types.DatabaseVersion" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "upgrade_cluster" + }, + "description": "Sample for UpgradeCluster", + "file": "alloydb_v1alpha_generated_alloy_db_admin_upgrade_cluster_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1alpha_generated_AlloyDBAdmin_UpgradeCluster_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"alloydb_v1alpha_generated_alloy_db_admin_upgrade_cluster_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.alloydb_v1alpha.AlloyDBAdminClient", + "shortName": "AlloyDBAdminClient" + }, + "fullName": "google.cloud.alloydb_v1alpha.AlloyDBAdminClient.upgrade_cluster", + "method": { + "fullName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin.UpgradeCluster", + "service": { + "fullName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "UpgradeCluster" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1alpha.types.UpgradeClusterRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "version", + "type": "google.cloud.alloydb_v1alpha.types.DatabaseVersion" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "upgrade_cluster" + }, + "description": "Sample for UpgradeCluster", + "file": "alloydb_v1alpha_generated_alloy_db_admin_upgrade_cluster_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1alpha_generated_AlloyDBAdmin_UpgradeCluster_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1alpha_generated_alloy_db_admin_upgrade_cluster_sync.py" } ] } diff --git a/packages/google-cloud-alloydb/samples/generated_samples/snippet_metadata_google.cloud.alloydb.v1beta.json 
b/packages/google-cloud-alloydb/samples/generated_samples/snippet_metadata_google.cloud.alloydb.v1beta.json index 7a908eddae8e..3ad53914353d 100644 --- a/packages/google-cloud-alloydb/samples/generated_samples/snippet_metadata_google.cloud.alloydb.v1beta.json +++ b/packages/google-cloud-alloydb/samples/generated_samples/snippet_metadata_google.cloud.alloydb.v1beta.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-alloydb", - "version": "0.3.15" + "version": "0.4.0" }, "snippets": [ { @@ -1864,6 +1864,199 @@ ], "title": "alloydb_v1beta_generated_alloy_db_admin_delete_user_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.alloydb_v1beta.AlloyDBAdminAsyncClient", + "shortName": "AlloyDBAdminAsyncClient" + }, + "fullName": "google.cloud.alloydb_v1beta.AlloyDBAdminAsyncClient.execute_sql", + "method": { + "fullName": "google.cloud.alloydb.v1beta.AlloyDBAdmin.ExecuteSql", + "service": { + "fullName": "google.cloud.alloydb.v1beta.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "ExecuteSql" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1beta.types.ExecuteSqlRequest" + }, + { + "name": "instance", + "type": "str" + }, + { + "name": "database", + "type": "str" + }, + { + "name": "user", + "type": "str" + }, + { + "name": "sql_statement", + "type": "str" + }, + { + "name": "password", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.alloydb_v1beta.types.ExecuteSqlResponse", + "shortName": "execute_sql" + }, + "description": "Sample for ExecuteSql", + "file": "alloydb_v1beta_generated_alloy_db_admin_execute_sql_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1beta_generated_AlloyDBAdmin_ExecuteSql_async", + 
"segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1beta_generated_alloy_db_admin_execute_sql_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.alloydb_v1beta.AlloyDBAdminClient", + "shortName": "AlloyDBAdminClient" + }, + "fullName": "google.cloud.alloydb_v1beta.AlloyDBAdminClient.execute_sql", + "method": { + "fullName": "google.cloud.alloydb.v1beta.AlloyDBAdmin.ExecuteSql", + "service": { + "fullName": "google.cloud.alloydb.v1beta.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "ExecuteSql" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1beta.types.ExecuteSqlRequest" + }, + { + "name": "instance", + "type": "str" + }, + { + "name": "database", + "type": "str" + }, + { + "name": "user", + "type": "str" + }, + { + "name": "sql_statement", + "type": "str" + }, + { + "name": "password", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.alloydb_v1beta.types.ExecuteSqlResponse", + "shortName": "execute_sql" + }, + "description": "Sample for ExecuteSql", + "file": "alloydb_v1beta_generated_alloy_db_admin_execute_sql_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1beta_generated_AlloyDBAdmin_ExecuteSql_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": 
"CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1beta_generated_alloy_db_admin_execute_sql_sync.py" + }, { "canonical": true, "clientMethod": { @@ -4601,6 +4794,167 @@ ], "title": "alloydb_v1beta_generated_alloy_db_admin_restore_cluster_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.alloydb_v1beta.AlloyDBAdminAsyncClient", + "shortName": "AlloyDBAdminAsyncClient" + }, + "fullName": "google.cloud.alloydb_v1beta.AlloyDBAdminAsyncClient.switchover_cluster", + "method": { + "fullName": "google.cloud.alloydb.v1beta.AlloyDBAdmin.SwitchoverCluster", + "service": { + "fullName": "google.cloud.alloydb.v1beta.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "SwitchoverCluster" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1beta.types.SwitchoverClusterRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "switchover_cluster" + }, + "description": "Sample for SwitchoverCluster", + "file": "alloydb_v1beta_generated_alloy_db_admin_switchover_cluster_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1beta_generated_AlloyDBAdmin_SwitchoverCluster_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, 
+ "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1beta_generated_alloy_db_admin_switchover_cluster_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.alloydb_v1beta.AlloyDBAdminClient", + "shortName": "AlloyDBAdminClient" + }, + "fullName": "google.cloud.alloydb_v1beta.AlloyDBAdminClient.switchover_cluster", + "method": { + "fullName": "google.cloud.alloydb.v1beta.AlloyDBAdmin.SwitchoverCluster", + "service": { + "fullName": "google.cloud.alloydb.v1beta.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "SwitchoverCluster" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1beta.types.SwitchoverClusterRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "switchover_cluster" + }, + "description": "Sample for SwitchoverCluster", + "file": "alloydb_v1beta_generated_alloy_db_admin_switchover_cluster_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1beta_generated_AlloyDBAdmin_SwitchoverCluster_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1beta_generated_alloy_db_admin_switchover_cluster_sync.py" + }, { "canonical": true, "clientMethod": { @@ -5276,6 +5630,175 @@ } ], "title": 
"alloydb_v1beta_generated_alloy_db_admin_update_user_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.alloydb_v1beta.AlloyDBAdminAsyncClient", + "shortName": "AlloyDBAdminAsyncClient" + }, + "fullName": "google.cloud.alloydb_v1beta.AlloyDBAdminAsyncClient.upgrade_cluster", + "method": { + "fullName": "google.cloud.alloydb.v1beta.AlloyDBAdmin.UpgradeCluster", + "service": { + "fullName": "google.cloud.alloydb.v1beta.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "UpgradeCluster" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1beta.types.UpgradeClusterRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "version", + "type": "google.cloud.alloydb_v1beta.types.DatabaseVersion" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "upgrade_cluster" + }, + "description": "Sample for UpgradeCluster", + "file": "alloydb_v1beta_generated_alloy_db_admin_upgrade_cluster_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1beta_generated_AlloyDBAdmin_UpgradeCluster_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1beta_generated_alloy_db_admin_upgrade_cluster_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.alloydb_v1beta.AlloyDBAdminClient", + "shortName": 
"AlloyDBAdminClient" + }, + "fullName": "google.cloud.alloydb_v1beta.AlloyDBAdminClient.upgrade_cluster", + "method": { + "fullName": "google.cloud.alloydb.v1beta.AlloyDBAdmin.UpgradeCluster", + "service": { + "fullName": "google.cloud.alloydb.v1beta.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "UpgradeCluster" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1beta.types.UpgradeClusterRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "version", + "type": "google.cloud.alloydb_v1beta.types.DatabaseVersion" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "upgrade_cluster" + }, + "description": "Sample for UpgradeCluster", + "file": "alloydb_v1beta_generated_alloy_db_admin_upgrade_cluster_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1beta_generated_AlloyDBAdmin_UpgradeCluster_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1beta_generated_alloy_db_admin_upgrade_cluster_sync.py" } ] } diff --git a/packages/google-cloud-alloydb/scripts/fixup_alloydb_v1alpha_keywords.py b/packages/google-cloud-alloydb/scripts/fixup_alloydb_v1alpha_keywords.py index 8d3b0dba519a..1b9db9c1fbd0 100644 --- a/packages/google-cloud-alloydb/scripts/fixup_alloydb_v1alpha_keywords.py +++ b/packages/google-cloud-alloydb/scripts/fixup_alloydb_v1alpha_keywords.py @@ -50,6 +50,7 @@ class 
alloydbCallTransformer(cst.CSTTransformer): 'delete_cluster': ('name', 'request_id', 'etag', 'validate_only', 'force', ), 'delete_instance': ('name', 'request_id', 'etag', 'validate_only', ), 'delete_user': ('name', 'request_id', 'validate_only', ), + 'execute_sql': ('instance', 'database', 'user', 'sql_statement', 'password', ), 'failover_instance': ('name', 'request_id', 'validate_only', ), 'generate_client_certificate': ('parent', 'request_id', 'pem_csr', 'cert_duration', 'public_key', 'use_metadata_exchange', ), 'get_backup': ('name', ), @@ -65,12 +66,14 @@ class alloydbCallTransformer(cst.CSTTransformer): 'list_supported_database_flags': ('parent', 'page_size', 'page_token', ), 'list_users': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), 'promote_cluster': ('name', 'request_id', 'etag', 'validate_only', ), - 'restart_instance': ('name', 'request_id', 'validate_only', ), + 'restart_instance': ('name', 'request_id', 'validate_only', 'node_ids', ), 'restore_cluster': ('parent', 'cluster_id', 'cluster', 'backup_source', 'continuous_backup_source', 'request_id', 'validate_only', ), + 'switchover_cluster': ('name', 'request_id', 'validate_only', ), 'update_backup': ('backup', 'update_mask', 'request_id', 'validate_only', 'allow_missing', ), 'update_cluster': ('cluster', 'update_mask', 'request_id', 'validate_only', 'allow_missing', ), 'update_instance': ('instance', 'update_mask', 'request_id', 'validate_only', 'allow_missing', ), 'update_user': ('user', 'update_mask', 'request_id', 'validate_only', 'allow_missing', ), + 'upgrade_cluster': ('name', 'version', 'request_id', 'validate_only', 'etag', ), } def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: diff --git a/packages/google-cloud-alloydb/scripts/fixup_alloydb_v1beta_keywords.py b/packages/google-cloud-alloydb/scripts/fixup_alloydb_v1beta_keywords.py index 8d3b0dba519a..1b9db9c1fbd0 100644 --- a/packages/google-cloud-alloydb/scripts/fixup_alloydb_v1beta_keywords.py +++ 
b/packages/google-cloud-alloydb/scripts/fixup_alloydb_v1beta_keywords.py @@ -50,6 +50,7 @@ class alloydbCallTransformer(cst.CSTTransformer): 'delete_cluster': ('name', 'request_id', 'etag', 'validate_only', 'force', ), 'delete_instance': ('name', 'request_id', 'etag', 'validate_only', ), 'delete_user': ('name', 'request_id', 'validate_only', ), + 'execute_sql': ('instance', 'database', 'user', 'sql_statement', 'password', ), 'failover_instance': ('name', 'request_id', 'validate_only', ), 'generate_client_certificate': ('parent', 'request_id', 'pem_csr', 'cert_duration', 'public_key', 'use_metadata_exchange', ), 'get_backup': ('name', ), @@ -65,12 +66,14 @@ class alloydbCallTransformer(cst.CSTTransformer): 'list_supported_database_flags': ('parent', 'page_size', 'page_token', ), 'list_users': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), 'promote_cluster': ('name', 'request_id', 'etag', 'validate_only', ), - 'restart_instance': ('name', 'request_id', 'validate_only', ), + 'restart_instance': ('name', 'request_id', 'validate_only', 'node_ids', ), 'restore_cluster': ('parent', 'cluster_id', 'cluster', 'backup_source', 'continuous_backup_source', 'request_id', 'validate_only', ), + 'switchover_cluster': ('name', 'request_id', 'validate_only', ), 'update_backup': ('backup', 'update_mask', 'request_id', 'validate_only', 'allow_missing', ), 'update_cluster': ('cluster', 'update_mask', 'request_id', 'validate_only', 'allow_missing', ), 'update_instance': ('instance', 'update_mask', 'request_id', 'validate_only', 'allow_missing', ), 'update_user': ('user', 'update_mask', 'request_id', 'validate_only', 'allow_missing', ), + 'upgrade_cluster': ('name', 'version', 'request_id', 'validate_only', 'etag', ), } def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: diff --git a/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1/test_alloy_db_admin.py b/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1/test_alloy_db_admin.py 
index 78403dac756e..a86f8d80f942 100644 --- a/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1/test_alloy_db_admin.py +++ b/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1/test_alloy_db_admin.py @@ -323,86 +323,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (AlloyDBAdminClient, transports.AlloyDBAdminGrpcTransport, "grpc"), - (AlloyDBAdminClient, transports.AlloyDBAdminRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. 
- google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
- ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1alpha/test_alloy_db_admin.py b/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1alpha/test_alloy_db_admin.py index aece66425791..01eeddae08d1 100644 --- a/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1alpha/test_alloy_db_admin.py +++ b/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1alpha/test_alloy_db_admin.py @@ -78,7 +78,13 @@ pagers, transports, ) -from google.cloud.alloydb_v1alpha.types import resources, service +from google.cloud.alloydb_v1alpha.types import ( + csql_resources, + data_model, + gemini, + resources, + service, +) async def mock_async_gen(data, chunk_size=1): @@ -323,86 +329,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (AlloyDBAdminClient, transports.AlloyDBAdminGrpcTransport, "grpc"), - (AlloyDBAdminClient, transports.AlloyDBAdminRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. 
- channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ @@ -1696,6 +1622,7 @@ def test_get_cluster(request_type, transport: str = "grpc"): reconciling=True, satisfies_pzi=True, satisfies_pzs=True, + subscription_type=resources.SubscriptionType.STANDARD, ) response = client.get_cluster(request) @@ -1718,6 +1645,7 @@ def test_get_cluster(request_type, transport: str = "grpc"): assert response.reconciling is True assert response.satisfies_pzi is True assert response.satisfies_pzs is True + assert response.subscription_type == resources.SubscriptionType.STANDARD def test_get_cluster_non_empty_request_with_auto_populated_field(): @@ -1854,6 +1782,7 @@ async def test_get_cluster_async( reconciling=True, satisfies_pzi=True, satisfies_pzs=True, + subscription_type=resources.SubscriptionType.STANDARD, ) ) response = await client.get_cluster(request) @@ -1877,6 +1806,7 @@ async def test_get_cluster_async( assert response.reconciling is True assert response.satisfies_pzi is True assert response.satisfies_pzs is True + assert response.subscription_type == resources.SubscriptionType.STANDARD @pytest.mark.asyncio 
@@ -2738,11 +2668,11 @@ async def test_update_cluster_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - service.DeleteClusterRequest, + service.UpgradeClusterRequest, dict, ], ) -def test_delete_cluster(request_type, transport: str = "grpc"): +def test_upgrade_cluster(request_type, transport: str = "grpc"): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -2753,22 +2683,22 @@ def test_delete_cluster(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_cluster), "__call__") as call: + with mock.patch.object(type(client.transport.upgrade_cluster), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.delete_cluster(request) + response = client.upgrade_cluster(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = service.DeleteClusterRequest() + request = service.UpgradeClusterRequest() assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) -def test_delete_cluster_non_empty_request_with_auto_populated_field(): +def test_upgrade_cluster_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AlloyDBAdminClient( @@ -2779,28 +2709,26 @@ def test_delete_cluster_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. 
- request = service.DeleteClusterRequest( + request = service.UpgradeClusterRequest( name="name_value", - request_id="request_id_value", etag="etag_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_cluster), "__call__") as call: + with mock.patch.object(type(client.transport.upgrade_cluster), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.delete_cluster(request=request) + client.upgrade_cluster(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.DeleteClusterRequest( + assert args[0] == service.UpgradeClusterRequest( name="name_value", - request_id="request_id_value", etag="etag_value", ) -def test_delete_cluster_use_cached_wrapped_rpc(): +def test_upgrade_cluster_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -2814,16 +2742,16 @@ def test_delete_cluster_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.delete_cluster in client._transport._wrapped_methods + assert client._transport.upgrade_cluster in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.delete_cluster] = mock_rpc + client._transport._wrapped_methods[client._transport.upgrade_cluster] = mock_rpc request = {} - client.delete_cluster(request) + client.upgrade_cluster(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 @@ -2833,7 +2761,7 @@ def test_delete_cluster_use_cached_wrapped_rpc(): # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.delete_cluster(request) + client.upgrade_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -2841,7 +2769,7 @@ def test_delete_cluster_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_delete_cluster_async_use_cached_wrapped_rpc( +async def test_upgrade_cluster_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -2858,7 +2786,7 @@ async def test_delete_cluster_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.delete_cluster + client._client._transport.upgrade_cluster in client._client._transport._wrapped_methods ) @@ -2866,11 +2794,11 @@ async def test_delete_cluster_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.delete_cluster + client._client._transport.upgrade_cluster ] = mock_rpc request = {} - await client.delete_cluster(request) + await client.upgrade_cluster(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 @@ -2880,7 +2808,7 @@ async def test_delete_cluster_async_use_cached_wrapped_rpc( # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - await client.delete_cluster(request) + await client.upgrade_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -2888,8 +2816,8 @@ async def test_delete_cluster_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_delete_cluster_async( - transport: str = "grpc_asyncio", request_type=service.DeleteClusterRequest +async def test_upgrade_cluster_async( + transport: str = "grpc_asyncio", request_type=service.UpgradeClusterRequest ): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), @@ -2901,17 +2829,17 @@ async def test_delete_cluster_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_cluster), "__call__") as call: + with mock.patch.object(type(client.transport.upgrade_cluster), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.delete_cluster(request) + response = await client.upgrade_cluster(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = service.DeleteClusterRequest() + request = service.UpgradeClusterRequest() assert args[0] == request # Establish that the response is the type that we expect. 
@@ -2919,25 +2847,25 @@ async def test_delete_cluster_async( @pytest.mark.asyncio -async def test_delete_cluster_async_from_dict(): - await test_delete_cluster_async(request_type=dict) +async def test_upgrade_cluster_async_from_dict(): + await test_upgrade_cluster_async(request_type=dict) -def test_delete_cluster_field_headers(): +def test_upgrade_cluster_field_headers(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.DeleteClusterRequest() + request = service.UpgradeClusterRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_cluster), "__call__") as call: + with mock.patch.object(type(client.transport.upgrade_cluster), "__call__") as call: call.return_value = operations_pb2.Operation(name="operations/op") - client.delete_cluster(request) + client.upgrade_cluster(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -2953,23 +2881,23 @@ def test_delete_cluster_field_headers(): @pytest.mark.asyncio -async def test_delete_cluster_field_headers_async(): +async def test_upgrade_cluster_field_headers_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.DeleteClusterRequest() + request = service.UpgradeClusterRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.delete_cluster), "__call__") as call: + with mock.patch.object(type(client.transport.upgrade_cluster), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) - await client.delete_cluster(request) + await client.upgrade_cluster(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -2984,19 +2912,20 @@ async def test_delete_cluster_field_headers_async(): ) in kw["metadata"] -def test_delete_cluster_flattened(): +def test_upgrade_cluster_flattened(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_cluster), "__call__") as call: + with mock.patch.object(type(client.transport.upgrade_cluster), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.delete_cluster( + client.upgrade_cluster( name="name_value", + version=resources.DatabaseVersion.POSTGRES_13, ) # Establish that the underlying call was made with the expected @@ -3006,9 +2935,12 @@ def test_delete_cluster_flattened(): arg = args[0].name mock_val = "name_value" assert arg == mock_val + arg = args[0].version + mock_val = resources.DatabaseVersion.POSTGRES_13 + assert arg == mock_val -def test_delete_cluster_flattened_error(): +def test_upgrade_cluster_flattened_error(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -3016,20 +2948,21 @@ def test_delete_cluster_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.delete_cluster( - service.DeleteClusterRequest(), + client.upgrade_cluster( + service.UpgradeClusterRequest(), name="name_value", + version=resources.DatabaseVersion.POSTGRES_13, ) @pytest.mark.asyncio -async def test_delete_cluster_flattened_async(): +async def test_upgrade_cluster_flattened_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_cluster), "__call__") as call: + with mock.patch.object(type(client.transport.upgrade_cluster), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") @@ -3038,8 +2971,9 @@ async def test_delete_cluster_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.delete_cluster( + response = await client.upgrade_cluster( name="name_value", + version=resources.DatabaseVersion.POSTGRES_13, ) # Establish that the underlying call was made with the expected @@ -3049,10 +2983,13 @@ async def test_delete_cluster_flattened_async(): arg = args[0].name mock_val = "name_value" assert arg == mock_val + arg = args[0].version + mock_val = resources.DatabaseVersion.POSTGRES_13 + assert arg == mock_val @pytest.mark.asyncio -async def test_delete_cluster_flattened_error_async(): +async def test_upgrade_cluster_flattened_error_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) @@ -3060,20 +2997,21 @@ async def test_delete_cluster_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.delete_cluster( - service.DeleteClusterRequest(), + await client.upgrade_cluster( + service.UpgradeClusterRequest(), name="name_value", + version=resources.DatabaseVersion.POSTGRES_13, ) @pytest.mark.parametrize( "request_type", [ - service.PromoteClusterRequest, + service.DeleteClusterRequest, dict, ], ) -def test_promote_cluster(request_type, transport: str = "grpc"): +def test_delete_cluster(request_type, transport: str = "grpc"): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -3084,22 +3022,22 @@ def test_promote_cluster(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.promote_cluster), "__call__") as call: + with mock.patch.object(type(client.transport.delete_cluster), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.promote_cluster(request) + response = client.delete_cluster(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = service.PromoteClusterRequest() + request = service.DeleteClusterRequest() assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) -def test_promote_cluster_non_empty_request_with_auto_populated_field(): +def test_delete_cluster_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. 
client = AlloyDBAdminClient( @@ -3110,28 +3048,28 @@ def test_promote_cluster_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = service.PromoteClusterRequest( + request = service.DeleteClusterRequest( name="name_value", request_id="request_id_value", etag="etag_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.promote_cluster), "__call__") as call: + with mock.patch.object(type(client.transport.delete_cluster), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.promote_cluster(request=request) + client.delete_cluster(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.PromoteClusterRequest( + assert args[0] == service.DeleteClusterRequest( name="name_value", request_id="request_id_value", etag="etag_value", ) -def test_promote_cluster_use_cached_wrapped_rpc(): +def test_delete_cluster_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -3145,16 +3083,16 @@ def test_promote_cluster_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.promote_cluster in client._transport._wrapped_methods + assert client._transport.delete_cluster in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.promote_cluster] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_cluster] = mock_rpc request = {} - client.promote_cluster(request) + client.delete_cluster(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 @@ -3164,7 +3102,7 @@ def test_promote_cluster_use_cached_wrapped_rpc(): # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.promote_cluster(request) + client.delete_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -3172,7 +3110,7 @@ def test_promote_cluster_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_promote_cluster_async_use_cached_wrapped_rpc( +async def test_delete_cluster_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -3189,7 +3127,7 @@ async def test_promote_cluster_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.promote_cluster + client._client._transport.delete_cluster in client._client._transport._wrapped_methods ) @@ -3197,11 +3135,11 @@ async def test_promote_cluster_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.promote_cluster + client._client._transport.delete_cluster ] = mock_rpc request = {} - await client.promote_cluster(request) + await client.delete_cluster(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 @@ -3211,7 +3149,7 @@ async def test_promote_cluster_async_use_cached_wrapped_rpc( # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - await client.promote_cluster(request) + await client.delete_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -3219,8 +3157,8 @@ async def test_promote_cluster_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_promote_cluster_async( - transport: str = "grpc_asyncio", request_type=service.PromoteClusterRequest +async def test_delete_cluster_async( + transport: str = "grpc_asyncio", request_type=service.DeleteClusterRequest ): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), @@ -3232,17 +3170,17 @@ async def test_promote_cluster_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.promote_cluster), "__call__") as call: + with mock.patch.object(type(client.transport.delete_cluster), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.promote_cluster(request) + response = await client.delete_cluster(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = service.PromoteClusterRequest() + request = service.DeleteClusterRequest() assert args[0] == request # Establish that the response is the type that we expect. 
@@ -3250,25 +3188,25 @@ async def test_promote_cluster_async( @pytest.mark.asyncio -async def test_promote_cluster_async_from_dict(): - await test_promote_cluster_async(request_type=dict) +async def test_delete_cluster_async_from_dict(): + await test_delete_cluster_async(request_type=dict) -def test_promote_cluster_field_headers(): +def test_delete_cluster_field_headers(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.PromoteClusterRequest() + request = service.DeleteClusterRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.promote_cluster), "__call__") as call: + with mock.patch.object(type(client.transport.delete_cluster), "__call__") as call: call.return_value = operations_pb2.Operation(name="operations/op") - client.promote_cluster(request) + client.delete_cluster(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -3284,23 +3222,23 @@ def test_promote_cluster_field_headers(): @pytest.mark.asyncio -async def test_promote_cluster_field_headers_async(): +async def test_delete_cluster_field_headers_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.PromoteClusterRequest() + request = service.DeleteClusterRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.promote_cluster), "__call__") as call: + with mock.patch.object(type(client.transport.delete_cluster), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) - await client.promote_cluster(request) + await client.delete_cluster(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -3315,18 +3253,18 @@ async def test_promote_cluster_field_headers_async(): ) in kw["metadata"] -def test_promote_cluster_flattened(): +def test_delete_cluster_flattened(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.promote_cluster), "__call__") as call: + with mock.patch.object(type(client.transport.delete_cluster), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.promote_cluster( + client.delete_cluster( name="name_value", ) @@ -3339,7 +3277,7 @@ def test_promote_cluster_flattened(): assert arg == mock_val -def test_promote_cluster_flattened_error(): +def test_delete_cluster_flattened_error(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -3347,20 +3285,20 @@ def test_promote_cluster_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.promote_cluster( - service.PromoteClusterRequest(), + client.delete_cluster( + service.DeleteClusterRequest(), name="name_value", ) @pytest.mark.asyncio -async def test_promote_cluster_flattened_async(): +async def test_delete_cluster_flattened_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.promote_cluster), "__call__") as call: + with mock.patch.object(type(client.transport.delete_cluster), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") @@ -3369,7 +3307,7 @@ async def test_promote_cluster_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.promote_cluster( + response = await client.delete_cluster( name="name_value", ) @@ -3383,7 +3321,7 @@ async def test_promote_cluster_flattened_async(): @pytest.mark.asyncio -async def test_promote_cluster_flattened_error_async(): +async def test_delete_cluster_flattened_error_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) @@ -3391,8 +3329,8 @@ async def test_promote_cluster_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.promote_cluster( - service.PromoteClusterRequest(), + await client.delete_cluster( + service.DeleteClusterRequest(), name="name_value", ) @@ -3400,11 +3338,11 @@ async def test_promote_cluster_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - service.RestoreClusterRequest, + service.PromoteClusterRequest, dict, ], ) -def test_restore_cluster(request_type, transport: str = "grpc"): +def test_promote_cluster(request_type, transport: str = "grpc"): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -3415,22 +3353,22 @@ def test_restore_cluster(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.restore_cluster), "__call__") as call: + with mock.patch.object(type(client.transport.promote_cluster), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.restore_cluster(request) + response = client.promote_cluster(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = service.RestoreClusterRequest() + request = service.PromoteClusterRequest() assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) -def test_restore_cluster_non_empty_request_with_auto_populated_field(): +def test_promote_cluster_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. 
client = AlloyDBAdminClient( @@ -3441,28 +3379,28 @@ def test_restore_cluster_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = service.RestoreClusterRequest( - parent="parent_value", - cluster_id="cluster_id_value", + request = service.PromoteClusterRequest( + name="name_value", request_id="request_id_value", + etag="etag_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.restore_cluster), "__call__") as call: + with mock.patch.object(type(client.transport.promote_cluster), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.restore_cluster(request=request) + client.promote_cluster(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.RestoreClusterRequest( - parent="parent_value", - cluster_id="cluster_id_value", + assert args[0] == service.PromoteClusterRequest( + name="name_value", request_id="request_id_value", + etag="etag_value", ) -def test_restore_cluster_use_cached_wrapped_rpc(): +def test_promote_cluster_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -3476,16 +3414,16 @@ def test_restore_cluster_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.restore_cluster in client._transport._wrapped_methods + assert client._transport.promote_cluster in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.restore_cluster] = mock_rpc + client._transport._wrapped_methods[client._transport.promote_cluster] = mock_rpc request = {} - client.restore_cluster(request) + client.promote_cluster(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 @@ -3495,7 +3433,7 @@ def test_restore_cluster_use_cached_wrapped_rpc(): # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.restore_cluster(request) + client.promote_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -3503,7 +3441,7 @@ def test_restore_cluster_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_restore_cluster_async_use_cached_wrapped_rpc( +async def test_promote_cluster_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -3520,7 +3458,7 @@ async def test_restore_cluster_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.restore_cluster + client._client._transport.promote_cluster in client._client._transport._wrapped_methods ) @@ -3528,11 +3466,11 @@ async def test_restore_cluster_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.restore_cluster + client._client._transport.promote_cluster ] = mock_rpc request = {} - await client.restore_cluster(request) + await client.promote_cluster(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 @@ -3542,7 +3480,7 @@ async def test_restore_cluster_async_use_cached_wrapped_rpc( # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - await client.restore_cluster(request) + await client.promote_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -3550,8 +3488,8 @@ async def test_restore_cluster_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_restore_cluster_async( - transport: str = "grpc_asyncio", request_type=service.RestoreClusterRequest +async def test_promote_cluster_async( + transport: str = "grpc_asyncio", request_type=service.PromoteClusterRequest ): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), @@ -3563,17 +3501,17 @@ async def test_restore_cluster_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.restore_cluster), "__call__") as call: + with mock.patch.object(type(client.transport.promote_cluster), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.restore_cluster(request) + response = await client.promote_cluster(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = service.RestoreClusterRequest() + request = service.PromoteClusterRequest() assert args[0] == request # Establish that the response is the type that we expect. 
@@ -3581,25 +3519,25 @@ async def test_restore_cluster_async( @pytest.mark.asyncio -async def test_restore_cluster_async_from_dict(): - await test_restore_cluster_async(request_type=dict) +async def test_promote_cluster_async_from_dict(): + await test_promote_cluster_async(request_type=dict) -def test_restore_cluster_field_headers(): +def test_promote_cluster_field_headers(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.RestoreClusterRequest() + request = service.PromoteClusterRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.restore_cluster), "__call__") as call: + with mock.patch.object(type(client.transport.promote_cluster), "__call__") as call: call.return_value = operations_pb2.Operation(name="operations/op") - client.restore_cluster(request) + client.promote_cluster(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -3610,28 +3548,28 @@ def test_restore_cluster_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_restore_cluster_field_headers_async(): +async def test_promote_cluster_field_headers_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.RestoreClusterRequest() + request = service.PromoteClusterRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.restore_cluster), "__call__") as call: + with mock.patch.object(type(client.transport.promote_cluster), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) - await client.restore_cluster(request) + await client.promote_cluster(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -3642,18 +3580,100 @@ async def test_restore_cluster_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] +def test_promote_cluster_flattened(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.promote_cluster), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.promote_cluster( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_promote_cluster_flattened_error(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.promote_cluster( + service.PromoteClusterRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_promote_cluster_flattened_async(): + client = AlloyDBAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.promote_cluster), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.promote_cluster( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_promote_cluster_flattened_error_async(): + client = AlloyDBAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.promote_cluster( + service.PromoteClusterRequest(), + name="name_value", + ) + + @pytest.mark.parametrize( "request_type", [ - service.CreateSecondaryClusterRequest, + service.SwitchoverClusterRequest, dict, ], ) -def test_create_secondary_cluster(request_type, transport: str = "grpc"): +def test_switchover_cluster(request_type, transport: str = "grpc"): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -3665,23 +3685,23 @@ def test_create_secondary_cluster(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_secondary_cluster), "__call__" + type(client.transport.switchover_cluster), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.create_secondary_cluster(request) + response = client.switchover_cluster(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = service.CreateSecondaryClusterRequest() + request = service.SwitchoverClusterRequest() assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) -def test_create_secondary_cluster_non_empty_request_with_auto_populated_field(): +def test_switchover_cluster_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. 
client = AlloyDBAdminClient( @@ -3692,30 +3712,28 @@ def test_create_secondary_cluster_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = service.CreateSecondaryClusterRequest( - parent="parent_value", - cluster_id="cluster_id_value", + request = service.SwitchoverClusterRequest( + name="name_value", request_id="request_id_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_secondary_cluster), "__call__" + type(client.transport.switchover_cluster), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.create_secondary_cluster(request=request) + client.switchover_cluster(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.CreateSecondaryClusterRequest( - parent="parent_value", - cluster_id="cluster_id_value", + assert args[0] == service.SwitchoverClusterRequest( + name="name_value", request_id="request_id_value", ) -def test_create_secondary_cluster_use_cached_wrapped_rpc(): +def test_switchover_cluster_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -3730,8 +3748,7 @@ def test_create_secondary_cluster_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.create_secondary_cluster - in client._transport._wrapped_methods + client._transport.switchover_cluster in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -3740,10 +3757,10 @@ def test_create_secondary_cluster_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) 
expect a string. ) client._transport._wrapped_methods[ - client._transport.create_secondary_cluster + client._transport.switchover_cluster ] = mock_rpc request = {} - client.create_secondary_cluster(request) + client.switchover_cluster(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 @@ -3753,7 +3770,7 @@ def test_create_secondary_cluster_use_cached_wrapped_rpc(): # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.create_secondary_cluster(request) + client.switchover_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -3761,7 +3778,7 @@ def test_create_secondary_cluster_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_create_secondary_cluster_async_use_cached_wrapped_rpc( +async def test_switchover_cluster_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -3778,7 +3795,7 @@ async def test_create_secondary_cluster_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.create_secondary_cluster + client._client._transport.switchover_cluster in client._client._transport._wrapped_methods ) @@ -3786,11 +3803,11 @@ async def test_create_secondary_cluster_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.create_secondary_cluster + client._client._transport.switchover_cluster ] = mock_rpc request = {} - await client.create_secondary_cluster(request) + await client.switchover_cluster(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 @@ -3800,7 +3817,7 @@ async def test_create_secondary_cluster_async_use_cached_wrapped_rpc( # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - await client.create_secondary_cluster(request) + await client.switchover_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -3808,8 +3825,8 @@ async def test_create_secondary_cluster_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_create_secondary_cluster_async( - transport: str = "grpc_asyncio", request_type=service.CreateSecondaryClusterRequest +async def test_switchover_cluster_async( + transport: str = "grpc_asyncio", request_type=service.SwitchoverClusterRequest ): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), @@ -3822,18 +3839,18 @@ async def test_create_secondary_cluster_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_secondary_cluster), "__call__" + type(client.transport.switchover_cluster), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.create_secondary_cluster(request) + response = await client.switchover_cluster(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = service.CreateSecondaryClusterRequest() + request = service.SwitchoverClusterRequest() assert args[0] == request # Establish that the response is the type that we expect. 
@@ -3841,27 +3858,27 @@ async def test_create_secondary_cluster_async( @pytest.mark.asyncio -async def test_create_secondary_cluster_async_from_dict(): - await test_create_secondary_cluster_async(request_type=dict) +async def test_switchover_cluster_async_from_dict(): + await test_switchover_cluster_async(request_type=dict) -def test_create_secondary_cluster_field_headers(): +def test_switchover_cluster_field_headers(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.CreateSecondaryClusterRequest() + request = service.SwitchoverClusterRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_secondary_cluster), "__call__" + type(client.transport.switchover_cluster), "__call__" ) as call: call.return_value = operations_pb2.Operation(name="operations/op") - client.create_secondary_cluster(request) + client.switchover_cluster(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -3872,30 +3889,30 @@ def test_create_secondary_cluster_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_create_secondary_cluster_field_headers_async(): +async def test_switchover_cluster_field_headers_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = service.CreateSecondaryClusterRequest() + request = service.SwitchoverClusterRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_secondary_cluster), "__call__" + type(client.transport.switchover_cluster), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) - await client.create_secondary_cluster(request) + await client.switchover_cluster(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -3906,49 +3923,37 @@ async def test_create_secondary_cluster_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] -def test_create_secondary_cluster_flattened(): +def test_switchover_cluster_flattened(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_secondary_cluster), "__call__" + type(client.transport.switchover_cluster), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.create_secondary_cluster( - parent="parent_value", - cluster=resources.Cluster( - backup_source=resources.BackupSource(backup_uid="backup_uid_value") - ), - cluster_id="cluster_id_value", + client.switchover_cluster( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].cluster - mock_val = resources.Cluster( - backup_source=resources.BackupSource(backup_uid="backup_uid_value") - ) - assert arg == mock_val - arg = args[0].cluster_id - mock_val = "cluster_id_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val -def test_create_secondary_cluster_flattened_error(): +def test_switchover_cluster_flattened_error(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -3956,25 +3961,21 @@ def test_create_secondary_cluster_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_secondary_cluster( - service.CreateSecondaryClusterRequest(), - parent="parent_value", - cluster=resources.Cluster( - backup_source=resources.BackupSource(backup_uid="backup_uid_value") - ), - cluster_id="cluster_id_value", + client.switchover_cluster( + service.SwitchoverClusterRequest(), + name="name_value", ) @pytest.mark.asyncio -async def test_create_secondary_cluster_flattened_async(): +async def test_switchover_cluster_flattened_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_secondary_cluster), "__call__" + type(client.transport.switchover_cluster), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") @@ -3984,33 +3985,21 @@ async def test_create_secondary_cluster_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.create_secondary_cluster( - parent="parent_value", - cluster=resources.Cluster( - backup_source=resources.BackupSource(backup_uid="backup_uid_value") - ), - cluster_id="cluster_id_value", + response = await client.switchover_cluster( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].cluster - mock_val = resources.Cluster( - backup_source=resources.BackupSource(backup_uid="backup_uid_value") - ) - assert arg == mock_val - arg = args[0].cluster_id - mock_val = "cluster_id_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio -async def test_create_secondary_cluster_flattened_error_async(): +async def test_switchover_cluster_flattened_error_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) @@ -4018,24 +4007,20 @@ async def test_create_secondary_cluster_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.create_secondary_cluster( - service.CreateSecondaryClusterRequest(), - parent="parent_value", - cluster=resources.Cluster( - backup_source=resources.BackupSource(backup_uid="backup_uid_value") - ), - cluster_id="cluster_id_value", + await client.switchover_cluster( + service.SwitchoverClusterRequest(), + name="name_value", ) @pytest.mark.parametrize( "request_type", [ - service.ListInstancesRequest, + service.RestoreClusterRequest, dict, ], ) -def test_list_instances(request_type, transport: str = "grpc"): +def test_restore_cluster(request_type, transport: str = "grpc"): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -4046,27 +4031,22 @@ def test_list_instances(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_instances), "__call__") as call: + with mock.patch.object(type(client.transport.restore_cluster), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = service.ListInstancesResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], - ) - response = client.list_instances(request) + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.restore_cluster(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = service.ListInstancesRequest() + request = service.RestoreClusterRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListInstancesPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert isinstance(response, future.Future) -def test_list_instances_non_empty_request_with_auto_populated_field(): +def test_restore_cluster_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AlloyDBAdminClient( @@ -4077,30 +4057,28 @@ def test_list_instances_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = service.ListInstancesRequest( + request = service.RestoreClusterRequest( parent="parent_value", - page_token="page_token_value", - filter="filter_value", - order_by="order_by_value", + cluster_id="cluster_id_value", + request_id="request_id_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_instances), "__call__") as call: + with mock.patch.object(type(client.transport.restore_cluster), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.list_instances(request=request) + client.restore_cluster(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.ListInstancesRequest( + assert args[0] == service.RestoreClusterRequest( parent="parent_value", - page_token="page_token_value", - filter="filter_value", - order_by="order_by_value", + cluster_id="cluster_id_value", + request_id="request_id_value", ) -def test_list_instances_use_cached_wrapped_rpc(): +def test_restore_cluster_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -4114,21 +4092,26 @@ def test_list_instances_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_instances in client._transport._wrapped_methods + assert client._transport.restore_cluster in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.list_instances] = mock_rpc + client._transport._wrapped_methods[client._transport.restore_cluster] = mock_rpc request = {} - client.list_instances(request) + client.restore_cluster(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_instances(request) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.restore_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -4136,7 +4119,7 @@ def test_list_instances_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_list_instances_async_use_cached_wrapped_rpc( +async def test_restore_cluster_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -4153,7 +4136,7 @@ async def test_list_instances_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.list_instances + client._client._transport.restore_cluster in client._client._transport._wrapped_methods ) @@ -4161,16 +4144,21 @@ async def test_list_instances_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.list_instances + client._client._transport.restore_cluster ] = mock_rpc request = {} - await client.list_instances(request) + await client.restore_cluster(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - await client.list_instances(request) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.restore_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -4178,8 +4166,8 @@ async def test_list_instances_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_list_instances_async( - transport: str = "grpc_asyncio", request_type=service.ListInstancesRequest +async def test_restore_cluster_async( + transport: str = "grpc_asyncio", request_type=service.RestoreClusterRequest ): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), @@ -4191,48 +4179,43 @@ async def test_list_instances_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_instances), "__call__") as call: + with mock.patch.object(type(client.transport.restore_cluster), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - service.ListInstancesResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], - ) + operations_pb2.Operation(name="operations/spam") ) - response = await client.list_instances(request) + response = await client.restore_cluster(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = service.ListInstancesRequest() + request = service.RestoreClusterRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListInstancesAsyncPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert isinstance(response, future.Future) @pytest.mark.asyncio -async def test_list_instances_async_from_dict(): - await test_list_instances_async(request_type=dict) +async def test_restore_cluster_async_from_dict(): + await test_restore_cluster_async(request_type=dict) -def test_list_instances_field_headers(): +def test_restore_cluster_field_headers(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.ListInstancesRequest() + request = service.RestoreClusterRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_instances), "__call__") as call: - call.return_value = service.ListInstancesResponse() - client.list_instances(request) + with mock.patch.object(type(client.transport.restore_cluster), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.restore_cluster(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -4248,23 +4231,23 @@ def test_list_instances_field_headers(): @pytest.mark.asyncio -async def test_list_instances_field_headers_async(): +async def test_restore_cluster_field_headers_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.ListInstancesRequest() + request = service.RestoreClusterRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_instances), "__call__") as call: + with mock.patch.object(type(client.transport.restore_cluster), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - service.ListInstancesResponse() + operations_pb2.Operation(name="operations/op") ) - await client.list_instances(request) + await client.restore_cluster(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -4279,290 +4262,396 @@ async def test_list_instances_field_headers_async(): ) in kw["metadata"] -def test_list_instances_flattened(): +@pytest.mark.parametrize( + "request_type", + [ + service.CreateSecondaryClusterRequest, + dict, + ], +) +def test_create_secondary_cluster(request_type, transport: str = "grpc"): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_instances), "__call__") as call: + with mock.patch.object( + type(client.transport.create_secondary_cluster), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = service.ListInstancesResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_instances( - parent="parent_value", - ) + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_secondary_cluster(request) - # Establish that the underlying call was made with the expected - # request object values. + # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val + request = service.CreateSecondaryClusterRequest() + assert args[0] == request + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) -def test_list_instances_flattened_error(): + +def test_create_secondary_cluster_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_instances( - service.ListInstancesRequest(), + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service.CreateSecondaryClusterRequest( + parent="parent_value", + cluster_id="cluster_id_value", + request_id="request_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_secondary_cluster), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.create_secondary_cluster(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.CreateSecondaryClusterRequest( parent="parent_value", + cluster_id="cluster_id_value", + request_id="request_id_value", + ) + + +def test_create_secondary_cluster_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_secondary_cluster + in client._transport._wrapped_methods ) + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_secondary_cluster + ] = mock_rpc + request = {} + client.create_secondary_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_secondary_cluster(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_list_instances_flattened_async(): +async def test_create_secondary_cluster_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = AlloyDBAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_secondary_cluster + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.create_secondary_cluster + ] = mock_rpc + + request = {} + await client.create_secondary_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_secondary_cluster(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_secondary_cluster_async( + transport: str = "grpc_asyncio", request_type=service.CreateSecondaryClusterRequest +): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), + transport=transport, ) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_instances), "__call__") as call: + with mock.patch.object( + type(client.transport.create_secondary_cluster), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = service.ListInstancesResponse() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - service.ListInstancesResponse() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_instances( - parent="parent_value", + operations_pb2.Operation(name="operations/spam") ) + response = await client.create_secondary_cluster(request) - # Establish that the underlying call was made with the expected - # request object values. + # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val + request = service.CreateSecondaryClusterRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) @pytest.mark.asyncio -async def test_list_instances_flattened_error_async(): - client = AlloyDBAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_instances( - service.ListInstancesRequest(), - parent="parent_value", - ) +async def test_create_secondary_cluster_async_from_dict(): + await test_create_secondary_cluster_async(request_type=dict) -def test_list_instances_pager(transport_name: str = "grpc"): +def test_create_secondary_cluster_field_headers(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, ) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.CreateSecondaryClusterRequest() + + request.parent = "parent_value" + # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_instances), "__call__") as call: - # Set the response to a series of pages. 
- call.side_effect = ( - service.ListInstancesResponse( - instances=[ - resources.Instance(), - resources.Instance(), - resources.Instance(), - ], - next_page_token="abc", - ), - service.ListInstancesResponse( - instances=[], - next_page_token="def", - ), - service.ListInstancesResponse( - instances=[ - resources.Instance(), - ], - next_page_token="ghi", - ), - service.ListInstancesResponse( - instances=[ - resources.Instance(), - resources.Instance(), - ], - ), - RuntimeError, - ) + with mock.patch.object( + type(client.transport.create_secondary_cluster), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_secondary_cluster(request) - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_secondary_cluster_field_headers_async(): + client = AlloyDBAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.CreateSecondaryClusterRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_secondary_cluster), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") ) - pager = client.list_instances(request={}, retry=retry, timeout=timeout) + await client.create_secondary_cluster(request) - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, resources.Instance) for i in results) + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] -def test_list_instances_pages(transport_name: str = "grpc"): +def test_create_secondary_cluster_flattened(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_instances), "__call__") as call: - # Set the response to a series of pages. - call.side_effect = ( - service.ListInstancesResponse( - instances=[ - resources.Instance(), - resources.Instance(), - resources.Instance(), - ], - next_page_token="abc", - ), - service.ListInstancesResponse( - instances=[], - next_page_token="def", - ), - service.ListInstancesResponse( - instances=[ - resources.Instance(), - ], - next_page_token="ghi", + with mock.patch.object( + type(client.transport.create_secondary_cluster), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.create_secondary_cluster( + parent="parent_value", + cluster=resources.Cluster( + backup_source=resources.BackupSource(backup_uid="backup_uid_value") ), - service.ListInstancesResponse( - instances=[ - resources.Instance(), - resources.Instance(), - ], + cluster_id="cluster_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].cluster + mock_val = resources.Cluster( + backup_source=resources.BackupSource(backup_uid="backup_uid_value") + ) + assert arg == mock_val + arg = args[0].cluster_id + mock_val = "cluster_id_value" + assert arg == mock_val + + +def test_create_secondary_cluster_flattened_error(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_secondary_cluster( + service.CreateSecondaryClusterRequest(), + parent="parent_value", + cluster=resources.Cluster( + backup_source=resources.BackupSource(backup_uid="backup_uid_value") ), - RuntimeError, + cluster_id="cluster_id_value", ) - pages = list(client.list_instances(request={}).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token @pytest.mark.asyncio -async def test_list_instances_async_pager(): +async def test_create_secondary_cluster_flattened_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_instances), "__call__", new_callable=mock.AsyncMock + type(client.transport.create_secondary_cluster), "__call__" ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - service.ListInstancesResponse( - instances=[ - resources.Instance(), - resources.Instance(), - resources.Instance(), - ], - next_page_token="abc", - ), - service.ListInstancesResponse( - instances=[], - next_page_token="def", - ), - service.ListInstancesResponse( - instances=[ - resources.Instance(), - ], - next_page_token="ghi", - ), - service.ListInstancesResponse( - instances=[ - resources.Instance(), - resources.Instance(), - ], - ), - RuntimeError, + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") ) - async_pager = await client.list_instances( - request={}, + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_secondary_cluster( + parent="parent_value", + cluster=resources.Cluster( + backup_source=resources.BackupSource(backup_uid="backup_uid_value") + ), + cluster_id="cluster_id_value", ) - assert async_pager.next_page_token == "abc" - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - assert len(responses) == 6 - assert all(isinstance(i, resources.Instance) for i in responses) + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].cluster + mock_val = resources.Cluster( + backup_source=resources.BackupSource(backup_uid="backup_uid_value") + ) + assert arg == mock_val + arg = args[0].cluster_id + mock_val = "cluster_id_value" + assert arg == mock_val @pytest.mark.asyncio -async def test_list_instances_async_pages(): +async def test_create_secondary_cluster_flattened_error_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_instances), "__call__", new_callable=mock.AsyncMock - ) as call: - # Set the response to a series of pages. - call.side_effect = ( - service.ListInstancesResponse( - instances=[ - resources.Instance(), - resources.Instance(), - resources.Instance(), - ], - next_page_token="abc", - ), - service.ListInstancesResponse( - instances=[], - next_page_token="def", - ), - service.ListInstancesResponse( - instances=[ - resources.Instance(), - ], - next_page_token="ghi", - ), - service.ListInstancesResponse( - instances=[ - resources.Instance(), - resources.Instance(), - ], + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.create_secondary_cluster( + service.CreateSecondaryClusterRequest(), + parent="parent_value", + cluster=resources.Cluster( + backup_source=resources.BackupSource(backup_uid="backup_uid_value") ), - RuntimeError, + cluster_id="cluster_id_value", ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_instances(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token @pytest.mark.parametrize( "request_type", [ - service.GetInstanceRequest, + service.ListInstancesRequest, dict, ], ) -def test_get_instance(request_type, transport: str = "grpc"): +def test_list_instances(request_type, transport: str = "grpc"): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -4573,49 +4662,27 @@ def test_get_instance(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_instance), "__call__") as call: + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = resources.Instance( - name="name_value", - display_name="display_name_value", - uid="uid_value", - state=resources.Instance.State.READY, - instance_type=resources.Instance.InstanceType.PRIMARY, - availability_type=resources.Instance.AvailabilityType.ZONAL, - gce_zone="gce_zone_value", - ip_address="ip_address_value", - public_ip_address="public_ip_address_value", - reconciling=True, - etag="etag_value", - satisfies_pzi=True, - satisfies_pzs=True, + call.return_value = service.ListInstancesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) - response = client.get_instance(request) + response = client.list_instances(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = service.GetInstanceRequest() + request = service.ListInstancesRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, resources.Instance) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.uid == "uid_value" - assert response.state == resources.Instance.State.READY - assert response.instance_type == resources.Instance.InstanceType.PRIMARY - assert response.availability_type == resources.Instance.AvailabilityType.ZONAL - assert response.gce_zone == "gce_zone_value" - assert response.ip_address == "ip_address_value" - assert response.public_ip_address == "public_ip_address_value" - assert response.reconciling is True - assert response.etag == "etag_value" - assert response.satisfies_pzi is True - assert response.satisfies_pzs is True + assert isinstance(response, pagers.ListInstancesPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] -def test_get_instance_non_empty_request_with_auto_populated_field(): +def 
test_list_instances_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AlloyDBAdminClient( @@ -4626,24 +4693,30 @@ def test_get_instance_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = service.GetInstanceRequest( - name="name_value", + request = service.ListInstancesRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_instance), "__call__") as call: + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.get_instance(request=request) + client.list_instances(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.GetInstanceRequest( - name="name_value", + assert args[0] == service.ListInstancesRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", ) -def test_get_instance_use_cached_wrapped_rpc(): +def test_list_instances_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -4657,21 +4730,21 @@ def test_get_instance_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_instance in client._transport._wrapped_methods + assert client._transport.list_instances in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_instance] = mock_rpc + client._transport._wrapped_methods[client._transport.list_instances] = mock_rpc request = {} - client.get_instance(request) + client.list_instances(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_instance(request) + client.list_instances(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -4679,7 +4752,7 @@ def test_get_instance_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_get_instance_async_use_cached_wrapped_rpc( +async def test_list_instances_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -4696,7 +4769,7 @@ async def test_get_instance_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.get_instance + client._client._transport.list_instances in client._client._transport._wrapped_methods ) @@ -4704,16 +4777,16 @@ async def test_get_instance_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.get_instance + client._client._transport.list_instances ] = mock_rpc request = {} - await client.get_instance(request) + await client.list_instances(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - await client.get_instance(request) + await client.list_instances(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -4721,8 +4794,8 @@ async def test_get_instance_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_get_instance_async( - transport: str = "grpc_asyncio", request_type=service.GetInstanceRequest +async def test_list_instances_async( + transport: str = "grpc_asyncio", request_type=service.ListInstancesRequest ): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), @@ -4734,70 +4807,48 @@ async def test_get_instance_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_instance), "__call__") as call: + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.Instance( - name="name_value", - display_name="display_name_value", - uid="uid_value", - state=resources.Instance.State.READY, - instance_type=resources.Instance.InstanceType.PRIMARY, - availability_type=resources.Instance.AvailabilityType.ZONAL, - gce_zone="gce_zone_value", - ip_address="ip_address_value", - public_ip_address="public_ip_address_value", - reconciling=True, - etag="etag_value", - satisfies_pzi=True, - satisfies_pzs=True, + service.ListInstancesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) ) - response = await client.get_instance(request) + response = await client.list_instances(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = service.GetInstanceRequest() + request = service.ListInstancesRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.Instance) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.uid == "uid_value" - assert response.state == resources.Instance.State.READY - assert response.instance_type == resources.Instance.InstanceType.PRIMARY - assert response.availability_type == resources.Instance.AvailabilityType.ZONAL - assert response.gce_zone == "gce_zone_value" - assert response.ip_address == "ip_address_value" - assert response.public_ip_address == "public_ip_address_value" - assert response.reconciling is True - assert response.etag == "etag_value" - assert response.satisfies_pzi is True - assert response.satisfies_pzs is True + assert isinstance(response, pagers.ListInstancesAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] @pytest.mark.asyncio -async def test_get_instance_async_from_dict(): - await test_get_instance_async(request_type=dict) +async def test_list_instances_async_from_dict(): + await test_list_instances_async(request_type=dict) -def test_get_instance_field_headers(): +def test_list_instances_field_headers(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.GetInstanceRequest() + request = service.ListInstancesRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_instance), "__call__") as call: - call.return_value = resources.Instance() - client.get_instance(request) + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: + call.return_value = service.ListInstancesResponse() + client.list_instances(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -4808,26 +4859,28 @@ def test_get_instance_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_get_instance_field_headers_async(): +async def test_list_instances_field_headers_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.GetInstanceRequest() + request = service.ListInstancesRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_instance), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Instance()) - await client.get_instance(request) + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListInstancesResponse() + ) + await client.list_instances(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -4838,35 +4891,35 @@ async def test_get_instance_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] -def test_get_instance_flattened(): +def test_list_instances_flattened(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_instance), "__call__") as call: + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = resources.Instance() + call.return_value = service.ListInstancesResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_instance( - name="name_value", + client.list_instances( + parent="parent_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" assert arg == mock_val -def test_get_instance_flattened_error(): +def test_list_instances_flattened_error(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -4874,41 +4927,43 @@ def test_get_instance_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_instance( - service.GetInstanceRequest(), - name="name_value", + client.list_instances( + service.ListInstancesRequest(), + parent="parent_value", ) @pytest.mark.asyncio -async def test_get_instance_flattened_async(): +async def test_list_instances_flattened_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_instance), "__call__") as call: + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = resources.Instance() + call.return_value = service.ListInstancesResponse() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Instance()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListInstancesResponse() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.get_instance( - name="name_value", + response = await client.list_instances( + parent="parent_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" assert arg == mock_val @pytest.mark.asyncio -async def test_get_instance_flattened_error_async(): +async def test_list_instances_flattened_error_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) @@ -4916,20 +4971,214 @@ async def test_get_instance_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.get_instance( - service.GetInstanceRequest(), - name="name_value", + await client.list_instances( + service.ListInstancesRequest(), + parent="parent_value", ) -@pytest.mark.parametrize( - "request_type", - [ - service.CreateInstanceRequest, +def test_list_instances_pager(transport_name: str = "grpc"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + service.ListInstancesResponse( + instances=[ + resources.Instance(), + resources.Instance(), + resources.Instance(), + ], + next_page_token="abc", + ), + service.ListInstancesResponse( + instances=[], + next_page_token="def", + ), + service.ListInstancesResponse( + instances=[ + resources.Instance(), + ], + next_page_token="ghi", + ), + service.ListInstancesResponse( + instances=[ + resources.Instance(), + resources.Instance(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_instances(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.Instance) for i in results) + + +def test_list_instances_pages(transport_name: str = "grpc"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + service.ListInstancesResponse( + instances=[ + resources.Instance(), + resources.Instance(), + resources.Instance(), + ], + next_page_token="abc", + ), + service.ListInstancesResponse( + instances=[], + next_page_token="def", + ), + service.ListInstancesResponse( + instances=[ + resources.Instance(), + ], + next_page_token="ghi", + ), + service.ListInstancesResponse( + instances=[ + resources.Instance(), + resources.Instance(), + ], + ), + RuntimeError, + ) + pages = list(client.list_instances(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_instances_async_pager(): + client = AlloyDBAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_instances), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + service.ListInstancesResponse( + instances=[ + resources.Instance(), + resources.Instance(), + resources.Instance(), + ], + next_page_token="abc", + ), + service.ListInstancesResponse( + instances=[], + next_page_token="def", + ), + service.ListInstancesResponse( + instances=[ + resources.Instance(), + ], + next_page_token="ghi", + ), + service.ListInstancesResponse( + instances=[ + resources.Instance(), + resources.Instance(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_instances( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, resources.Instance) for i in responses) + + +@pytest.mark.asyncio +async def test_list_instances_async_pages(): + client = AlloyDBAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_instances), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + service.ListInstancesResponse( + instances=[ + resources.Instance(), + resources.Instance(), + resources.Instance(), + ], + next_page_token="abc", + ), + service.ListInstancesResponse( + instances=[], + next_page_token="def", + ), + service.ListInstancesResponse( + instances=[ + resources.Instance(), + ], + next_page_token="ghi", + ), + service.ListInstancesResponse( + instances=[ + resources.Instance(), + resources.Instance(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_instances(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + service.GetInstanceRequest, dict, ], ) -def test_create_instance(request_type, transport: str = "grpc"): +def test_get_instance(request_type, transport: str = "grpc"): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -4940,22 +5189,53 @@ def test_create_instance(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_instance), "__call__") as call: + with mock.patch.object(type(client.transport.get_instance), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.create_instance(request) + call.return_value = resources.Instance( + name="name_value", + display_name="display_name_value", + uid="uid_value", + state=resources.Instance.State.READY, + instance_type=resources.Instance.InstanceType.PRIMARY, + availability_type=resources.Instance.AvailabilityType.ZONAL, + gce_zone="gce_zone_value", + ip_address="ip_address_value", + public_ip_address="public_ip_address_value", + reconciling=True, + etag="etag_value", + satisfies_pzi=True, + satisfies_pzs=True, + outbound_public_ip_addresses=["outbound_public_ip_addresses_value"], + ) + response = client.get_instance(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = service.CreateInstanceRequest() + request = service.GetInstanceRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) + assert isinstance(response, resources.Instance) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.uid == "uid_value" + assert response.state == resources.Instance.State.READY + assert response.instance_type == resources.Instance.InstanceType.PRIMARY + assert response.availability_type == resources.Instance.AvailabilityType.ZONAL + assert response.gce_zone == "gce_zone_value" + assert response.ip_address == "ip_address_value" + assert response.public_ip_address == "public_ip_address_value" + assert response.reconciling is True + assert response.etag == "etag_value" + assert response.satisfies_pzi is True + assert response.satisfies_pzs is True + assert response.outbound_public_ip_addresses == [ + "outbound_public_ip_addresses_value" + ] -def test_create_instance_non_empty_request_with_auto_populated_field(): +def test_get_instance_non_empty_request_with_auto_populated_field(): 
# This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AlloyDBAdminClient( @@ -4966,28 +5246,24 @@ def test_create_instance_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = service.CreateInstanceRequest( - parent="parent_value", - instance_id="instance_id_value", - request_id="request_id_value", + request = service.GetInstanceRequest( + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_instance), "__call__") as call: + with mock.patch.object(type(client.transport.get_instance), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.create_instance(request=request) + client.get_instance(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.CreateInstanceRequest( - parent="parent_value", - instance_id="instance_id_value", - request_id="request_id_value", - ) + assert args[0] == service.GetInstanceRequest( + name="name_value", + ) -def test_create_instance_use_cached_wrapped_rpc(): +def test_get_instance_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -5001,26 +5277,21 @@ def test_create_instance_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.create_instance in client._transport._wrapped_methods + assert client._transport.get_instance in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() 
mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.create_instance] = mock_rpc + client._transport._wrapped_methods[client._transport.get_instance] = mock_rpc request = {} - client.create_instance(request) + client.get_instance(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_instance(request) + client.get_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -5028,7 +5299,7 @@ def test_create_instance_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_create_instance_async_use_cached_wrapped_rpc( +async def test_get_instance_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -5045,7 +5316,7 @@ async def test_create_instance_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.create_instance + client._client._transport.get_instance in client._client._transport._wrapped_methods ) @@ -5053,21 +5324,16 @@ async def test_create_instance_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.create_instance + client._client._transport.get_instance ] = mock_rpc request = {} - await client.create_instance(request) + await client.get_instance(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.create_instance(request) + await client.get_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -5075,8 +5341,8 @@ async def test_create_instance_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_create_instance_async( - transport: str = "grpc_asyncio", request_type=service.CreateInstanceRequest +async def test_get_instance_async( + transport: str = "grpc_asyncio", request_type=service.GetInstanceRequest ): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), @@ -5088,43 +5354,74 @@ async def test_create_instance_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_instance), "__call__") as call: + with mock.patch.object(type(client.transport.get_instance), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + resources.Instance( + name="name_value", + display_name="display_name_value", + uid="uid_value", + state=resources.Instance.State.READY, + instance_type=resources.Instance.InstanceType.PRIMARY, + availability_type=resources.Instance.AvailabilityType.ZONAL, + gce_zone="gce_zone_value", + ip_address="ip_address_value", + public_ip_address="public_ip_address_value", + reconciling=True, + etag="etag_value", + satisfies_pzi=True, + satisfies_pzs=True, + outbound_public_ip_addresses=["outbound_public_ip_addresses_value"], + ) ) - response = await client.create_instance(request) + response = await client.get_instance(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = service.CreateInstanceRequest() + request = service.GetInstanceRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) + assert isinstance(response, resources.Instance) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.uid == "uid_value" + assert response.state == resources.Instance.State.READY + assert response.instance_type == resources.Instance.InstanceType.PRIMARY + assert response.availability_type == resources.Instance.AvailabilityType.ZONAL + assert response.gce_zone == "gce_zone_value" + assert response.ip_address == "ip_address_value" + assert response.public_ip_address == "public_ip_address_value" + assert response.reconciling is True + assert response.etag == "etag_value" + assert response.satisfies_pzi is True + assert response.satisfies_pzs is True + assert response.outbound_public_ip_addresses == [ + "outbound_public_ip_addresses_value" + ] @pytest.mark.asyncio -async def test_create_instance_async_from_dict(): - await test_create_instance_async(request_type=dict) +async def test_get_instance_async_from_dict(): + await test_get_instance_async(request_type=dict) -def test_create_instance_field_headers(): +def test_get_instance_field_headers(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.CreateInstanceRequest() + request = service.GetInstanceRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.create_instance), "__call__") as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.create_instance(request) + with mock.patch.object(type(client.transport.get_instance), "__call__") as call: + call.return_value = resources.Instance() + client.get_instance(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -5135,28 +5432,26 @@ def test_create_instance_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_create_instance_field_headers_async(): +async def test_get_instance_field_headers_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.CreateInstanceRequest() + request = service.GetInstanceRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_instance), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/op") - ) - await client.create_instance(request) + with mock.patch.object(type(client.transport.get_instance), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Instance()) + await client.get_instance(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) @@ -5167,43 +5462,35 @@ async def test_create_instance_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] -def test_create_instance_flattened(): +def test_get_instance_flattened(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_instance), "__call__") as call: + with mock.patch.object(type(client.transport.get_instance), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") + call.return_value = resources.Instance() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.create_instance( - parent="parent_value", - instance=resources.Instance(name="name_value"), - instance_id="instance_id_value", + client.get_instance( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].instance - mock_val = resources.Instance(name="name_value") - assert arg == mock_val - arg = args[0].instance_id - mock_val = "instance_id_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val -def test_create_instance_flattened_error(): +def test_get_instance_flattened_error(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -5211,53 +5498,41 @@ def test_create_instance_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.create_instance( - service.CreateInstanceRequest(), - parent="parent_value", - instance=resources.Instance(name="name_value"), - instance_id="instance_id_value", + client.get_instance( + service.GetInstanceRequest(), + name="name_value", ) @pytest.mark.asyncio -async def test_create_instance_flattened_async(): +async def test_get_instance_flattened_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_instance), "__call__") as call: + with mock.patch.object(type(client.transport.get_instance), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") + call.return_value = resources.Instance() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Instance()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.create_instance( - parent="parent_value", - instance=resources.Instance(name="name_value"), - instance_id="instance_id_value", + response = await client.get_instance( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].instance - mock_val = resources.Instance(name="name_value") - assert arg == mock_val - arg = args[0].instance_id - mock_val = "instance_id_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio -async def test_create_instance_flattened_error_async(): +async def test_get_instance_flattened_error_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) @@ -5265,22 +5540,20 @@ async def test_create_instance_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.create_instance( - service.CreateInstanceRequest(), - parent="parent_value", - instance=resources.Instance(name="name_value"), - instance_id="instance_id_value", + await client.get_instance( + service.GetInstanceRequest(), + name="name_value", ) @pytest.mark.parametrize( "request_type", [ - service.CreateSecondaryInstanceRequest, + service.CreateInstanceRequest, dict, ], ) -def test_create_secondary_instance(request_type, transport: str = "grpc"): +def test_create_instance(request_type, transport: str = "grpc"): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5291,24 +5564,22 @@ def test_create_secondary_instance(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_secondary_instance), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_instance), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.create_secondary_instance(request) + response = client.create_instance(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = service.CreateSecondaryInstanceRequest() + request = service.CreateInstanceRequest() assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) -def test_create_secondary_instance_non_empty_request_with_auto_populated_field(): +def test_create_instance_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AlloyDBAdminClient( @@ -5319,30 +5590,28 @@ def test_create_secondary_instance_non_empty_request_with_auto_populated_field() # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = service.CreateSecondaryInstanceRequest( + request = service.CreateInstanceRequest( parent="parent_value", instance_id="instance_id_value", request_id="request_id_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_secondary_instance), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_instance), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.create_secondary_instance(request=request) + client.create_instance(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.CreateSecondaryInstanceRequest( + assert args[0] == service.CreateInstanceRequest( parent="parent_value", instance_id="instance_id_value", request_id="request_id_value", ) -def test_create_secondary_instance_use_cached_wrapped_rpc(): +def test_create_instance_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -5356,21 +5625,16 @@ def test_create_secondary_instance_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.create_secondary_instance - in client._transport._wrapped_methods - ) + assert client._transport.create_instance in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.create_secondary_instance - ] = mock_rpc + client._transport._wrapped_methods[client._transport.create_instance] = mock_rpc request = {} - client.create_secondary_instance(request) + client.create_instance(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 @@ -5380,7 +5644,7 @@ def test_create_secondary_instance_use_cached_wrapped_rpc(): # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.create_secondary_instance(request) + client.create_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -5388,7 +5652,7 @@ def test_create_secondary_instance_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_create_secondary_instance_async_use_cached_wrapped_rpc( +async def test_create_instance_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -5405,7 +5669,7 @@ async def test_create_secondary_instance_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.create_secondary_instance + client._client._transport.create_instance in client._client._transport._wrapped_methods ) @@ -5413,11 +5677,11 @@ async def test_create_secondary_instance_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.create_secondary_instance + client._client._transport.create_instance ] = mock_rpc request = {} - await client.create_secondary_instance(request) + await client.create_instance(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 @@ -5427,7 +5691,7 @@ async def test_create_secondary_instance_async_use_cached_wrapped_rpc( # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - await client.create_secondary_instance(request) + await client.create_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -5435,8 +5699,8 @@ async def test_create_secondary_instance_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_create_secondary_instance_async( - transport: str = "grpc_asyncio", request_type=service.CreateSecondaryInstanceRequest +async def test_create_instance_async( + transport: str = "grpc_asyncio", request_type=service.CreateInstanceRequest ): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), @@ -5448,19 +5712,17 @@ async def test_create_secondary_instance_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_secondary_instance), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_instance), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.create_secondary_instance(request) + response = await client.create_instance(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = service.CreateSecondaryInstanceRequest() + request = service.CreateInstanceRequest() assert args[0] == request # Establish that the response is the type that we expect. 
@@ -5468,27 +5730,25 @@ async def test_create_secondary_instance_async( @pytest.mark.asyncio -async def test_create_secondary_instance_async_from_dict(): - await test_create_secondary_instance_async(request_type=dict) +async def test_create_instance_async_from_dict(): + await test_create_instance_async(request_type=dict) -def test_create_secondary_instance_field_headers(): +def test_create_instance_field_headers(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.CreateSecondaryInstanceRequest() + request = service.CreateInstanceRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_secondary_instance), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_instance), "__call__") as call: call.return_value = operations_pb2.Operation(name="operations/op") - client.create_secondary_instance(request) + client.create_instance(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -5504,25 +5764,23 @@ def test_create_secondary_instance_field_headers(): @pytest.mark.asyncio -async def test_create_secondary_instance_field_headers_async(): +async def test_create_instance_field_headers_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.CreateSecondaryInstanceRequest() + request = service.CreateInstanceRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_secondary_instance), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_instance), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) - await client.create_secondary_instance(request) + await client.create_instance(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -5537,20 +5795,18 @@ async def test_create_secondary_instance_field_headers_async(): ) in kw["metadata"] -def test_create_secondary_instance_flattened(): +def test_create_instance_flattened(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_secondary_instance), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_instance), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.create_secondary_instance( + client.create_instance( parent="parent_value", instance=resources.Instance(name="name_value"), instance_id="instance_id_value", @@ -5571,7 +5827,7 @@ def test_create_secondary_instance_flattened(): assert arg == mock_val -def test_create_secondary_instance_flattened_error(): +def test_create_instance_flattened_error(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -5579,8 +5835,8 @@ def test_create_secondary_instance_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.create_secondary_instance( - service.CreateSecondaryInstanceRequest(), + client.create_instance( + service.CreateInstanceRequest(), parent="parent_value", instance=resources.Instance(name="name_value"), instance_id="instance_id_value", @@ -5588,15 +5844,13 @@ def test_create_secondary_instance_flattened_error(): @pytest.mark.asyncio -async def test_create_secondary_instance_flattened_async(): +async def test_create_instance_flattened_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_secondary_instance), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_instance), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") @@ -5605,7 +5859,7 @@ async def test_create_secondary_instance_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.create_secondary_instance( + response = await client.create_instance( parent="parent_value", instance=resources.Instance(name="name_value"), instance_id="instance_id_value", @@ -5627,7 +5881,7 @@ async def test_create_secondary_instance_flattened_async(): @pytest.mark.asyncio -async def test_create_secondary_instance_flattened_error_async(): +async def test_create_instance_flattened_error_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) @@ -5635,8 +5889,8 @@ async def test_create_secondary_instance_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.create_secondary_instance( - service.CreateSecondaryInstanceRequest(), + await client.create_instance( + service.CreateInstanceRequest(), parent="parent_value", instance=resources.Instance(name="name_value"), instance_id="instance_id_value", @@ -5646,11 +5900,11 @@ async def test_create_secondary_instance_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - service.BatchCreateInstancesRequest, + service.CreateSecondaryInstanceRequest, dict, ], ) -def test_batch_create_instances(request_type, transport: str = "grpc"): +def test_create_secondary_instance(request_type, transport: str = "grpc"): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5662,23 +5916,23 @@ def test_batch_create_instances(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.batch_create_instances), "__call__" + type(client.transport.create_secondary_instance), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.batch_create_instances(request) + response = client.create_secondary_instance(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = service.BatchCreateInstancesRequest() + request = service.CreateSecondaryInstanceRequest() assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) -def test_batch_create_instances_non_empty_request_with_auto_populated_field(): +def test_create_secondary_instance_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. 
client = AlloyDBAdminClient( @@ -5689,28 +5943,30 @@ def test_batch_create_instances_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = service.BatchCreateInstancesRequest( + request = service.CreateSecondaryInstanceRequest( parent="parent_value", + instance_id="instance_id_value", request_id="request_id_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.batch_create_instances), "__call__" + type(client.transport.create_secondary_instance), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.batch_create_instances(request=request) + client.create_secondary_instance(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.BatchCreateInstancesRequest( + assert args[0] == service.CreateSecondaryInstanceRequest( parent="parent_value", + instance_id="instance_id_value", request_id="request_id_value", ) -def test_batch_create_instances_use_cached_wrapped_rpc(): +def test_create_secondary_instance_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -5725,7 +5981,7 @@ def test_batch_create_instances_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.batch_create_instances + client._transport.create_secondary_instance in client._transport._wrapped_methods ) @@ -5735,10 +5991,10 @@ def test_batch_create_instances_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. 
) client._transport._wrapped_methods[ - client._transport.batch_create_instances + client._transport.create_secondary_instance ] = mock_rpc request = {} - client.batch_create_instances(request) + client.create_secondary_instance(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 @@ -5748,7 +6004,7 @@ def test_batch_create_instances_use_cached_wrapped_rpc(): # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.batch_create_instances(request) + client.create_secondary_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -5756,7 +6012,7 @@ def test_batch_create_instances_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_batch_create_instances_async_use_cached_wrapped_rpc( +async def test_create_secondary_instance_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -5773,7 +6029,7 @@ async def test_batch_create_instances_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.batch_create_instances + client._client._transport.create_secondary_instance in client._client._transport._wrapped_methods ) @@ -5781,11 +6037,11 @@ async def test_batch_create_instances_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.batch_create_instances + client._client._transport.create_secondary_instance ] = mock_rpc request = {} - await client.batch_create_instances(request) + await client.create_secondary_instance(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 @@ -5795,7 +6051,7 @@ async def test_batch_create_instances_async_use_cached_wrapped_rpc( # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - await client.batch_create_instances(request) + await client.create_secondary_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -5803,8 +6059,8 @@ async def test_batch_create_instances_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_batch_create_instances_async( - transport: str = "grpc_asyncio", request_type=service.BatchCreateInstancesRequest +async def test_create_secondary_instance_async( + transport: str = "grpc_asyncio", request_type=service.CreateSecondaryInstanceRequest ): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), @@ -5817,18 +6073,18 @@ async def test_batch_create_instances_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.batch_create_instances), "__call__" + type(client.transport.create_secondary_instance), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.batch_create_instances(request) + response = await client.create_secondary_instance(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = service.BatchCreateInstancesRequest() + request = service.CreateSecondaryInstanceRequest() assert args[0] == request # Establish that the response is the type that we expect. 
@@ -5836,27 +6092,27 @@ async def test_batch_create_instances_async( @pytest.mark.asyncio -async def test_batch_create_instances_async_from_dict(): - await test_batch_create_instances_async(request_type=dict) +async def test_create_secondary_instance_async_from_dict(): + await test_create_secondary_instance_async(request_type=dict) -def test_batch_create_instances_field_headers(): +def test_create_secondary_instance_field_headers(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.BatchCreateInstancesRequest() + request = service.CreateSecondaryInstanceRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.batch_create_instances), "__call__" + type(client.transport.create_secondary_instance), "__call__" ) as call: call.return_value = operations_pb2.Operation(name="operations/op") - client.batch_create_instances(request) + client.create_secondary_instance(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -5872,25 +6128,25 @@ def test_batch_create_instances_field_headers(): @pytest.mark.asyncio -async def test_batch_create_instances_field_headers_async(): +async def test_create_secondary_instance_field_headers_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.BatchCreateInstancesRequest() + request = service.CreateSecondaryInstanceRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.batch_create_instances), "__call__" + type(client.transport.create_secondary_instance), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) - await client.batch_create_instances(request) + await client.create_secondary_instance(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -5905,14 +6161,120 @@ async def test_batch_create_instances_field_headers_async(): ) in kw["metadata"] +def test_create_secondary_instance_flattened(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_secondary_instance), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_secondary_instance( + parent="parent_value", + instance=resources.Instance(name="name_value"), + instance_id="instance_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].instance + mock_val = resources.Instance(name="name_value") + assert arg == mock_val + arg = args[0].instance_id + mock_val = "instance_id_value" + assert arg == mock_val + + +def test_create_secondary_instance_flattened_error(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_secondary_instance( + service.CreateSecondaryInstanceRequest(), + parent="parent_value", + instance=resources.Instance(name="name_value"), + instance_id="instance_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_secondary_instance_flattened_async(): + client = AlloyDBAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_secondary_instance), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_secondary_instance( + parent="parent_value", + instance=resources.Instance(name="name_value"), + instance_id="instance_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].instance + mock_val = resources.Instance(name="name_value") + assert arg == mock_val + arg = args[0].instance_id + mock_val = "instance_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_secondary_instance_flattened_error_async(): + client = AlloyDBAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.create_secondary_instance( + service.CreateSecondaryInstanceRequest(), + parent="parent_value", + instance=resources.Instance(name="name_value"), + instance_id="instance_id_value", + ) + + @pytest.mark.parametrize( "request_type", [ - service.UpdateInstanceRequest, + service.BatchCreateInstancesRequest, dict, ], ) -def test_update_instance(request_type, transport: str = "grpc"): +def test_batch_create_instances(request_type, transport: str = "grpc"): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5923,22 +6285,24 @@ def test_update_instance(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_instance), "__call__") as call: + with mock.patch.object( + type(client.transport.batch_create_instances), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.update_instance(request) + response = client.batch_create_instances(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = service.UpdateInstanceRequest() + request = service.BatchCreateInstancesRequest() assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) -def test_update_instance_non_empty_request_with_auto_populated_field(): +def test_batch_create_instances_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. 
client = AlloyDBAdminClient( @@ -5949,24 +6313,28 @@ def test_update_instance_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = service.UpdateInstanceRequest( + request = service.BatchCreateInstancesRequest( + parent="parent_value", request_id="request_id_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_instance), "__call__") as call: + with mock.patch.object( + type(client.transport.batch_create_instances), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.update_instance(request=request) + client.batch_create_instances(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.UpdateInstanceRequest( + assert args[0] == service.BatchCreateInstancesRequest( + parent="parent_value", request_id="request_id_value", ) -def test_update_instance_use_cached_wrapped_rpc(): +def test_batch_create_instances_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -5980,16 +6348,21 @@ def test_update_instance_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.update_instance in client._transport._wrapped_methods + assert ( + client._transport.batch_create_instances + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.update_instance] = mock_rpc + client._transport._wrapped_methods[ + client._transport.batch_create_instances + ] = mock_rpc request = {} - client.update_instance(request) + client.batch_create_instances(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 @@ -5999,7 +6372,7 @@ def test_update_instance_use_cached_wrapped_rpc(): # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.update_instance(request) + client.batch_create_instances(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -6007,7 +6380,7 @@ def test_update_instance_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_update_instance_async_use_cached_wrapped_rpc( +async def test_batch_create_instances_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -6024,7 +6397,258 @@ async def test_update_instance_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.update_instance + client._client._transport.batch_create_instances + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.batch_create_instances + ] = mock_rpc + + request = {} + await client.batch_create_instances(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.batch_create_instances(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_batch_create_instances_async( + transport: str = "grpc_asyncio", request_type=service.BatchCreateInstancesRequest +): + client = AlloyDBAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_create_instances), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.batch_create_instances(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = service.BatchCreateInstancesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_batch_create_instances_async_from_dict(): + await test_batch_create_instances_async(request_type=dict) + + +def test_batch_create_instances_field_headers(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.BatchCreateInstancesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.batch_create_instances), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.batch_create_instances(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_batch_create_instances_field_headers_async(): + client = AlloyDBAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.BatchCreateInstancesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_create_instances), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.batch_create_instances(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + service.UpdateInstanceRequest, + dict, + ], +) +def test_update_instance(request_type, transport: str = "grpc"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.update_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = service.UpdateInstanceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_instance_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service.UpdateInstanceRequest( + request_id="request_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_instance), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.update_instance(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.UpdateInstanceRequest( + request_id="request_id_value", + ) + + +def test_update_instance_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_instance in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_instance] = mock_rpc + request = {} + client.update_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_instance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_instance_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = AlloyDBAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_instance in client._client._transport._wrapped_methods ) @@ -7591,11 +8215,11 @@ async def test_restart_instance_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - service.ListBackupsRequest, + service.ExecuteSqlRequest, dict, ], ) -def test_list_backups(request_type, transport: str = "grpc"): +def test_execute_sql(request_type, transport: str = "grpc"): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -7606,27 +8230,22 @@ def test_list_backups(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_backups), "__call__") as call: + with mock.patch.object(type(client.transport.execute_sql), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = service.ListBackupsResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], - ) - response = client.list_backups(request) + call.return_value = service.ExecuteSqlResponse() + response = client.execute_sql(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = service.ListBackupsRequest() + request = service.ExecuteSqlRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListBackupsPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert isinstance(response, service.ExecuteSqlResponse) -def test_list_backups_non_empty_request_with_auto_populated_field(): +def test_execute_sql_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AlloyDBAdminClient( @@ -7637,30 +8256,32 @@ def test_list_backups_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = service.ListBackupsRequest( - parent="parent_value", - page_token="page_token_value", - filter="filter_value", - order_by="order_by_value", + request = service.ExecuteSqlRequest( + password="password_value", + instance="instance_value", + database="database_value", + user="user_value", + sql_statement="sql_statement_value", ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_backups), "__call__") as call: + with mock.patch.object(type(client.transport.execute_sql), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.list_backups(request=request) + client.execute_sql(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.ListBackupsRequest( - parent="parent_value", - page_token="page_token_value", - filter="filter_value", - order_by="order_by_value", + assert args[0] == service.ExecuteSqlRequest( + password="password_value", + instance="instance_value", + database="database_value", + user="user_value", + sql_statement="sql_statement_value", ) -def test_list_backups_use_cached_wrapped_rpc(): +def test_execute_sql_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -7674,21 +8295,21 @@ def test_list_backups_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_backups in client._transport._wrapped_methods + assert client._transport.execute_sql in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.list_backups] = mock_rpc + client._transport._wrapped_methods[client._transport.execute_sql] = mock_rpc request = {} - client.list_backups(request) + client.execute_sql(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_backups(request) + client.execute_sql(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -7696,7 +8317,7 @@ def test_list_backups_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_list_backups_async_use_cached_wrapped_rpc( +async def test_execute_sql_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -7713,7 +8334,7 @@ async def test_list_backups_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.list_backups + client._client._transport.execute_sql in client._client._transport._wrapped_methods ) @@ -7721,16 +8342,16 @@ async def test_list_backups_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.list_backups + client._client._transport.execute_sql ] = mock_rpc request = {} - await client.list_backups(request) + await client.execute_sql(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - await client.list_backups(request) + await client.execute_sql(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -7738,8 +8359,8 @@ async def test_list_backups_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_list_backups_async( - transport: str = "grpc_asyncio", request_type=service.ListBackupsRequest +async def test_execute_sql_async( + transport: str = "grpc_asyncio", request_type=service.ExecuteSqlRequest ): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), @@ -7751,48 +8372,43 @@ async def test_list_backups_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_backups), "__call__") as call: + with mock.patch.object(type(client.transport.execute_sql), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - service.ListBackupsResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], - ) + service.ExecuteSqlResponse() ) - response = await client.list_backups(request) + response = await client.execute_sql(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = service.ListBackupsRequest() + request = service.ExecuteSqlRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListBackupsAsyncPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert isinstance(response, service.ExecuteSqlResponse) @pytest.mark.asyncio -async def test_list_backups_async_from_dict(): - await test_list_backups_async(request_type=dict) +async def test_execute_sql_async_from_dict(): + await test_execute_sql_async(request_type=dict) -def test_list_backups_field_headers(): +def test_execute_sql_field_headers(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.ListBackupsRequest() + request = service.ExecuteSqlRequest() - request.parent = "parent_value" + request.instance = "instance_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_backups), "__call__") as call: - call.return_value = service.ListBackupsResponse() - client.list_backups(request) + with mock.patch.object(type(client.transport.execute_sql), "__call__") as call: + call.return_value = service.ExecuteSqlResponse() + client.execute_sql(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -7803,28 +8419,28 @@ def test_list_backups_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "instance=instance_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_list_backups_field_headers_async(): +async def test_execute_sql_field_headers_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.ListBackupsRequest() + request = service.ExecuteSqlRequest() - request.parent = "parent_value" + request.instance = "instance_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_backups), "__call__") as call: + with mock.patch.object(type(client.transport.execute_sql), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - service.ListBackupsResponse() + service.ExecuteSqlResponse() ) - await client.list_backups(request) + await client.execute_sql(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) @@ -7835,35 +8451,49 @@ async def test_list_backups_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "instance=instance_value", ) in kw["metadata"] -def test_list_backups_flattened(): +def test_execute_sql_flattened(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_backups), "__call__") as call: + with mock.patch.object(type(client.transport.execute_sql), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = service.ListBackupsResponse() + call.return_value = service.ExecuteSqlResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_backups( - parent="parent_value", + client.execute_sql( + instance="instance_value", + database="database_value", + user="user_value", + sql_statement="sql_statement_value", + password="password_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].instance + mock_val = "instance_value" + assert arg == mock_val + arg = args[0].database + mock_val = "database_value" + assert arg == mock_val + arg = args[0].user + mock_val = "user_value" assert arg == mock_val + arg = args[0].sql_statement + mock_val = "sql_statement_value" + assert arg == mock_val + assert args[0].password == "password_value" -def test_list_backups_flattened_error(): +def test_execute_sql_flattened_error(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -7871,43 +8501,61 @@ def test_list_backups_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_backups( - service.ListBackupsRequest(), - parent="parent_value", + client.execute_sql( + service.ExecuteSqlRequest(), + instance="instance_value", + database="database_value", + user="user_value", + sql_statement="sql_statement_value", + password="password_value", ) @pytest.mark.asyncio -async def test_list_backups_flattened_async(): +async def test_execute_sql_flattened_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_backups), "__call__") as call: + with mock.patch.object(type(client.transport.execute_sql), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = service.ListBackupsResponse() + call.return_value = service.ExecuteSqlResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - service.ListBackupsResponse() + service.ExecuteSqlResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.list_backups( - parent="parent_value", + response = await client.execute_sql( + instance="instance_value", + database="database_value", + user="user_value", + sql_statement="sql_statement_value", + password="password_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].instance + mock_val = "instance_value" assert arg == mock_val + arg = args[0].database + mock_val = "database_value" + assert arg == mock_val + arg = args[0].user + mock_val = "user_value" + assert arg == mock_val + arg = args[0].sql_statement + mock_val = "sql_statement_value" + assert arg == mock_val + assert args[0].password == "password_value" @pytest.mark.asyncio -async def test_list_backups_flattened_error_async(): +async def test_execute_sql_flattened_error_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) @@ -7915,325 +8563,117 @@ async def test_list_backups_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.list_backups( - service.ListBackupsRequest(), - parent="parent_value", + await client.execute_sql( + service.ExecuteSqlRequest(), + instance="instance_value", + database="database_value", + user="user_value", + sql_statement="sql_statement_value", + password="password_value", ) -def test_list_backups_pager(transport_name: str = "grpc"): +@pytest.mark.parametrize( + "request_type", + [ + service.ListBackupsRequest, + dict, + ], +) +def test_list_backups(request_type, transport: str = "grpc"): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, + transport=transport, ) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_backups), "__call__") as call: - # Set the response to a series of pages. - call.side_effect = ( - service.ListBackupsResponse( - backups=[ - resources.Backup(), - resources.Backup(), - resources.Backup(), - ], - next_page_token="abc", - ), - service.ListBackupsResponse( - backups=[], - next_page_token="def", - ), - service.ListBackupsResponse( - backups=[ - resources.Backup(), - ], - next_page_token="ghi", - ), - service.ListBackupsResponse( - backups=[ - resources.Backup(), - resources.Backup(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + # Designate an appropriate return value for the call. 
+ call.return_value = service.ListBackupsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) - pager = client.list_backups(request={}, retry=retry, timeout=timeout) + response = client.list_backups(request) - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = service.ListBackupsRequest() + assert args[0] == request - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, resources.Backup) for i in results) + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListBackupsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] -def test_list_backups_pages(transport_name: str = "grpc"): +def test_list_backups_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service.ListBackupsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_backups), "__call__") as call: - # Set the response to a series of pages. 
- call.side_effect = ( - service.ListBackupsResponse( - backups=[ - resources.Backup(), - resources.Backup(), - resources.Backup(), - ], - next_page_token="abc", - ), - service.ListBackupsResponse( - backups=[], - next_page_token="def", - ), - service.ListBackupsResponse( - backups=[ - resources.Backup(), - ], - next_page_token="ghi", - ), - service.ListBackupsResponse( - backups=[ - resources.Backup(), - resources.Backup(), - ], - ), - RuntimeError, + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_backups(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListBackupsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", ) - pages = list(client.list_backups(request={}).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token -@pytest.mark.asyncio -async def test_list_backups_async_pager(): - client = AlloyDBAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backups), "__call__", new_callable=mock.AsyncMock - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - service.ListBackupsResponse( - backups=[ - resources.Backup(), - resources.Backup(), - resources.Backup(), - ], - next_page_token="abc", - ), - service.ListBackupsResponse( - backups=[], - next_page_token="def", - ), - service.ListBackupsResponse( - backups=[ - resources.Backup(), - ], - next_page_token="ghi", - ), - service.ListBackupsResponse( - backups=[ - resources.Backup(), - resources.Backup(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_backups( - request={}, - ) - assert async_pager.next_page_token == "abc" - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, resources.Backup) for i in responses) - - -@pytest.mark.asyncio -async def test_list_backups_async_pages(): - client = AlloyDBAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backups), "__call__", new_callable=mock.AsyncMock - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - service.ListBackupsResponse( - backups=[ - resources.Backup(), - resources.Backup(), - resources.Backup(), - ], - next_page_token="abc", - ), - service.ListBackupsResponse( - backups=[], - next_page_token="def", - ), - service.ListBackupsResponse( - backups=[ - resources.Backup(), - ], - next_page_token="ghi", - ), - service.ListBackupsResponse( - backups=[ - resources.Backup(), - resources.Backup(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_backups(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize( - "request_type", - [ - service.GetBackupRequest, - dict, - ], -) -def test_get_backup(request_type, transport: str = "grpc"): - client = AlloyDBAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_backup), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = resources.Backup( - name="name_value", - display_name="display_name_value", - uid="uid_value", - state=resources.Backup.State.READY, - type_=resources.Backup.Type.ON_DEMAND, - description="description_value", - cluster_uid="cluster_uid_value", - cluster_name="cluster_name_value", - reconciling=True, - etag="etag_value", - size_bytes=1089, - satisfies_pzi=True, - satisfies_pzs=True, - database_version=resources.DatabaseVersion.POSTGRES_13, - ) - response = client.get_backup(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = service.GetBackupRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, resources.Backup) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.uid == "uid_value" - assert response.state == resources.Backup.State.READY - assert response.type_ == resources.Backup.Type.ON_DEMAND - assert response.description == "description_value" - assert response.cluster_uid == "cluster_uid_value" - assert response.cluster_name == "cluster_name_value" - assert response.reconciling is True - assert response.etag == "etag_value" - assert response.size_bytes == 1089 - assert response.satisfies_pzi is True - assert response.satisfies_pzs is True - assert response.database_version == resources.DatabaseVersion.POSTGRES_13 - - -def test_get_backup_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. 
- client = AlloyDBAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = service.GetBackupRequest( - name="name_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_backup), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.get_backup(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.GetBackupRequest( - name="name_value", - ) - - -def test_get_backup_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AlloyDBAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) +def test_list_backups_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) # Should wrap all calls on client creation assert wrapper_fn.call_count > 0 wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_backup in client._transport._wrapped_methods + assert client._transport.list_backups in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.get_backup] = mock_rpc + client._transport._wrapped_methods[client._transport.list_backups] = mock_rpc request = {} - client.get_backup(request) + client.list_backups(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_backup(request) + client.list_backups(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -8241,7 +8681,9 @@ def test_get_backup_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_get_backup_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_list_backups_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -8256,7 +8698,7 @@ async def test_get_backup_async_use_cached_wrapped_rpc(transport: str = "grpc_as # Ensure method has been cached assert ( - client._client._transport.get_backup + client._client._transport.list_backups in client._client._transport._wrapped_methods ) @@ -8264,16 +8706,16 @@ async def test_get_backup_async_use_cached_wrapped_rpc(transport: str = "grpc_as mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.get_backup + client._client._transport.list_backups ] = mock_rpc request = {} - await client.get_backup(request) + await client.list_backups(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - await client.get_backup(request) + await client.list_backups(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -8281,8 +8723,8 @@ async def test_get_backup_async_use_cached_wrapped_rpc(transport: str = "grpc_as @pytest.mark.asyncio -async def test_get_backup_async( - transport: str = "grpc_asyncio", request_type=service.GetBackupRequest +async def test_list_backups_async( + transport: str = "grpc_asyncio", request_type=service.ListBackupsRequest ): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), @@ -8294,72 +8736,48 @@ async def test_get_backup_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_backup), "__call__") as call: + with mock.patch.object(type(client.transport.list_backups), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.Backup( - name="name_value", - display_name="display_name_value", - uid="uid_value", - state=resources.Backup.State.READY, - type_=resources.Backup.Type.ON_DEMAND, - description="description_value", - cluster_uid="cluster_uid_value", - cluster_name="cluster_name_value", - reconciling=True, - etag="etag_value", - size_bytes=1089, - satisfies_pzi=True, - satisfies_pzs=True, - database_version=resources.DatabaseVersion.POSTGRES_13, + service.ListBackupsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) ) - response = await client.get_backup(request) + response = await client.list_backups(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = service.GetBackupRequest() + request = service.ListBackupsRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.Backup) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.uid == "uid_value" - assert response.state == resources.Backup.State.READY - assert response.type_ == resources.Backup.Type.ON_DEMAND - assert response.description == "description_value" - assert response.cluster_uid == "cluster_uid_value" - assert response.cluster_name == "cluster_name_value" - assert response.reconciling is True - assert response.etag == "etag_value" - assert response.size_bytes == 1089 - assert response.satisfies_pzi is True - assert response.satisfies_pzs is True - assert response.database_version == resources.DatabaseVersion.POSTGRES_13 + assert isinstance(response, pagers.ListBackupsAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] @pytest.mark.asyncio -async def test_get_backup_async_from_dict(): - await test_get_backup_async(request_type=dict) +async def test_list_backups_async_from_dict(): + await test_list_backups_async(request_type=dict) -def test_get_backup_field_headers(): +def test_list_backups_field_headers(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.GetBackupRequest() + request = service.ListBackupsRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_backup), "__call__") as call: - call.return_value = resources.Backup() - client.get_backup(request) + with mock.patch.object(type(client.transport.list_backups), "__call__") as call: + call.return_value = service.ListBackupsResponse() + client.list_backups(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -8370,26 +8788,28 @@ def test_get_backup_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_get_backup_field_headers_async(): +async def test_list_backups_field_headers_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.GetBackupRequest() + request = service.ListBackupsRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_backup), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Backup()) - await client.get_backup(request) + with mock.patch.object(type(client.transport.list_backups), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListBackupsResponse() + ) + await client.list_backups(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -8400,35 +8820,35 @@ async def test_get_backup_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] -def test_get_backup_flattened(): +def test_list_backups_flattened(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_backup), "__call__") as call: + with mock.patch.object(type(client.transport.list_backups), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = resources.Backup() + call.return_value = service.ListBackupsResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_backup( - name="name_value", + client.list_backups( + parent="parent_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" assert arg == mock_val -def test_get_backup_flattened_error(): +def test_list_backups_flattened_error(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -8436,41 +8856,43 @@ def test_get_backup_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_backup( - service.GetBackupRequest(), - name="name_value", + client.list_backups( + service.ListBackupsRequest(), + parent="parent_value", ) @pytest.mark.asyncio -async def test_get_backup_flattened_async(): +async def test_list_backups_flattened_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_backup), "__call__") as call: + with mock.patch.object(type(client.transport.list_backups), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = resources.Backup() + call.return_value = service.ListBackupsResponse() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Backup()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListBackupsResponse() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.get_backup( - name="name_value", + response = await client.list_backups( + parent="parent_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" assert arg == mock_val @pytest.mark.asyncio -async def test_get_backup_flattened_error_async(): +async def test_list_backups_flattened_error_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) @@ -8478,111 +8900,325 @@ async def test_get_backup_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.get_backup( - service.GetBackupRequest(), - name="name_value", + await client.list_backups( + service.ListBackupsRequest(), + parent="parent_value", ) -@pytest.mark.parametrize( - "request_type", - [ - service.CreateBackupRequest, - dict, - ], -) -def test_create_backup(request_type, transport: str = "grpc"): +def test_list_backups_pager(transport_name: str = "grpc"): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport=transport_name, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_backup), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.create_backup(request) + with mock.patch.object(type(client.transport.list_backups), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + service.ListBackupsResponse( + backups=[ + resources.Backup(), + resources.Backup(), + resources.Backup(), + ], + next_page_token="abc", + ), + service.ListBackupsResponse( + backups=[], + next_page_token="def", + ), + service.ListBackupsResponse( + backups=[ + resources.Backup(), + ], + next_page_token="ghi", + ), + service.ListBackupsResponse( + backups=[ + resources.Backup(), + resources.Backup(), + ], + ), + RuntimeError, + ) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = service.CreateBackupRequest() - assert args[0] == request + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_backups(request={}, retry=retry, timeout=timeout) - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.Backup) for i in results) -def test_create_backup_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. + +def test_list_backups_pages(transport_name: str = "grpc"): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport=transport_name, ) - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. 
- request = service.CreateBackupRequest( - parent="parent_value", - backup_id="backup_id_value", - request_id="request_id_value", + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_backups), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListBackupsResponse( + backups=[ + resources.Backup(), + resources.Backup(), + resources.Backup(), + ], + next_page_token="abc", + ), + service.ListBackupsResponse( + backups=[], + next_page_token="def", + ), + service.ListBackupsResponse( + backups=[ + resources.Backup(), + ], + next_page_token="ghi", + ), + service.ListBackupsResponse( + backups=[ + resources.Backup(), + resources.Backup(), + ], + ), + RuntimeError, + ) + pages = list(client.list_backups(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_backups_async_pager(): + client = AlloyDBAdminAsyncClient( + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_backup), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. + with mock.patch.object( + type(client.transport.list_backups), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + service.ListBackupsResponse( + backups=[ + resources.Backup(), + resources.Backup(), + resources.Backup(), + ], + next_page_token="abc", + ), + service.ListBackupsResponse( + backups=[], + next_page_token="def", + ), + service.ListBackupsResponse( + backups=[ + resources.Backup(), + ], + next_page_token="ghi", + ), + service.ListBackupsResponse( + backups=[ + resources.Backup(), + resources.Backup(), + ], + ), + RuntimeError, ) - client.create_backup(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.CreateBackupRequest( - parent="parent_value", - backup_id="backup_id_value", - request_id="request_id_value", + async_pager = await client.list_backups( + request={}, ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + assert len(responses) == 6 + assert all(isinstance(i, resources.Backup) for i in responses) -def test_create_backup_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AlloyDBAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - # Ensure method has been cached - assert client._transport.create_backup in client._transport._wrapped_methods +@pytest.mark.asyncio +async def test_list_backups_async_pages(): + client = AlloyDBAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_backups), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListBackupsResponse( + backups=[ + resources.Backup(), + resources.Backup(), + resources.Backup(), + ], + next_page_token="abc", + ), + service.ListBackupsResponse( + backups=[], + next_page_token="def", + ), + service.ListBackupsResponse( + backups=[ + resources.Backup(), + ], + next_page_token="ghi", + ), + service.ListBackupsResponse( + backups=[ + resources.Backup(), + resources.Backup(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_backups(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + service.GetBackupRequest, + dict, + ], +) +def test_get_backup(request_type, transport: str = "grpc"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = resources.Backup( + name="name_value", + display_name="display_name_value", + uid="uid_value", + state=resources.Backup.State.READY, + type_=resources.Backup.Type.ON_DEMAND, + description="description_value", + cluster_uid="cluster_uid_value", + cluster_name="cluster_name_value", + reconciling=True, + etag="etag_value", + size_bytes=1089, + satisfies_pzi=True, + satisfies_pzs=True, + database_version=resources.DatabaseVersion.POSTGRES_13, + ) + response = client.get_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = service.GetBackupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.Backup) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.uid == "uid_value" + assert response.state == resources.Backup.State.READY + assert response.type_ == resources.Backup.Type.ON_DEMAND + assert response.description == "description_value" + assert response.cluster_uid == "cluster_uid_value" + assert response.cluster_name == "cluster_name_value" + assert response.reconciling is True + assert response.etag == "etag_value" + assert response.size_bytes == 1089 + assert response.satisfies_pzi is True + assert response.satisfies_pzs is True + assert response.database_version == resources.DatabaseVersion.POSTGRES_13 + + +def test_get_backup_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service.GetBackupRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup), "__call__") as call: + call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.create_backup] = mock_rpc + client.get_backup(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetBackupRequest( + name="name_value", + ) + + +def test_get_backup_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_backup in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_backup] = mock_rpc request = {} - client.create_backup(request) + client.get_backup(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_backup(request) + client.get_backup(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -8590,9 +9226,7 @@ def test_create_backup_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_create_backup_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): +async def test_get_backup_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -8607,7 +9241,7 @@ async def test_create_backup_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.create_backup + client._client._transport.get_backup in client._client._transport._wrapped_methods ) @@ -8615,21 +9249,16 @@ async def test_create_backup_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.create_backup + client._client._transport.get_backup ] = mock_rpc request = {} - await client.create_backup(request) + await client.get_backup(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.create_backup(request) + await client.get_backup(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -8637,8 +9266,8 @@ async def test_create_backup_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_create_backup_async( - transport: str = "grpc_asyncio", request_type=service.CreateBackupRequest +async def test_get_backup_async( + transport: str = "grpc_asyncio", request_type=service.GetBackupRequest ): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), @@ -8650,43 +9279,72 @@ async def test_create_backup_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_backup), "__call__") as call: + with mock.patch.object(type(client.transport.get_backup), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + resources.Backup( + name="name_value", + display_name="display_name_value", + uid="uid_value", + state=resources.Backup.State.READY, + type_=resources.Backup.Type.ON_DEMAND, + description="description_value", + cluster_uid="cluster_uid_value", + cluster_name="cluster_name_value", + reconciling=True, + etag="etag_value", + size_bytes=1089, + satisfies_pzi=True, + satisfies_pzs=True, + database_version=resources.DatabaseVersion.POSTGRES_13, + ) ) - response = await client.create_backup(request) + response = await client.get_backup(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = service.CreateBackupRequest() + request = service.GetBackupRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, future.Future) + assert isinstance(response, resources.Backup) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.uid == "uid_value" + assert response.state == resources.Backup.State.READY + assert response.type_ == resources.Backup.Type.ON_DEMAND + assert response.description == "description_value" + assert response.cluster_uid == "cluster_uid_value" + assert response.cluster_name == "cluster_name_value" + assert response.reconciling is True + assert response.etag == "etag_value" + assert response.size_bytes == 1089 + assert response.satisfies_pzi is True + assert response.satisfies_pzs is True + assert response.database_version == resources.DatabaseVersion.POSTGRES_13 @pytest.mark.asyncio -async def test_create_backup_async_from_dict(): - await test_create_backup_async(request_type=dict) +async def test_get_backup_async_from_dict(): + await test_get_backup_async(request_type=dict) -def test_create_backup_field_headers(): +def test_get_backup_field_headers(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.CreateBackupRequest() + request = service.GetBackupRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_backup), "__call__") as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.create_backup(request) + with mock.patch.object(type(client.transport.get_backup), "__call__") as call: + call.return_value = resources.Backup() + client.get_backup(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -8697,28 +9355,26 @@ def test_create_backup_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_create_backup_field_headers_async(): +async def test_get_backup_field_headers_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.CreateBackupRequest() + request = service.GetBackupRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_backup), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/op") - ) - await client.create_backup(request) + with mock.patch.object(type(client.transport.get_backup), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Backup()) + await client.get_backup(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -8729,43 +9385,35 @@ async def test_create_backup_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] -def test_create_backup_flattened(): +def test_get_backup_flattened(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_backup), "__call__") as call: + with mock.patch.object(type(client.transport.get_backup), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = operations_pb2.Operation(name="operations/op") + call.return_value = resources.Backup() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.create_backup( - parent="parent_value", - backup=resources.Backup(name="name_value"), - backup_id="backup_id_value", + client.get_backup( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].backup - mock_val = resources.Backup(name="name_value") - assert arg == mock_val - arg = args[0].backup_id - mock_val = "backup_id_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val -def test_create_backup_flattened_error(): +def test_get_backup_flattened_error(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -8773,53 +9421,41 @@ def test_create_backup_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_backup( - service.CreateBackupRequest(), - parent="parent_value", - backup=resources.Backup(name="name_value"), - backup_id="backup_id_value", + client.get_backup( + service.GetBackupRequest(), + name="name_value", ) @pytest.mark.asyncio -async def test_create_backup_flattened_async(): +async def test_get_backup_flattened_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_backup), "__call__") as call: + with mock.patch.object(type(client.transport.get_backup), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = operations_pb2.Operation(name="operations/op") + call.return_value = resources.Backup() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Backup()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.create_backup( - parent="parent_value", - backup=resources.Backup(name="name_value"), - backup_id="backup_id_value", + response = await client.get_backup( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].backup - mock_val = resources.Backup(name="name_value") - assert arg == mock_val - arg = args[0].backup_id - mock_val = "backup_id_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio -async def test_create_backup_flattened_error_async(): +async def test_get_backup_flattened_error_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) @@ -8827,22 +9463,20 @@ async def test_create_backup_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.create_backup( - service.CreateBackupRequest(), - parent="parent_value", - backup=resources.Backup(name="name_value"), - backup_id="backup_id_value", + await client.get_backup( + service.GetBackupRequest(), + name="name_value", ) @pytest.mark.parametrize( "request_type", [ - service.UpdateBackupRequest, + service.CreateBackupRequest, dict, ], ) -def test_update_backup(request_type, transport: str = "grpc"): +def test_create_backup(request_type, transport: str = "grpc"): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -8853,22 +9487,22 @@ def test_update_backup(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_backup), "__call__") as call: + with mock.patch.object(type(client.transport.create_backup), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.update_backup(request) + response = client.create_backup(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = service.UpdateBackupRequest() + request = service.CreateBackupRequest() assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) -def test_update_backup_non_empty_request_with_auto_populated_field(): +def test_create_backup_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. 
client = AlloyDBAdminClient( @@ -8879,24 +9513,28 @@ def test_update_backup_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = service.UpdateBackupRequest( + request = service.CreateBackupRequest( + parent="parent_value", + backup_id="backup_id_value", request_id="request_id_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_backup), "__call__") as call: + with mock.patch.object(type(client.transport.create_backup), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.update_backup(request=request) + client.create_backup(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.UpdateBackupRequest( + assert args[0] == service.CreateBackupRequest( + parent="parent_value", + backup_id="backup_id_value", request_id="request_id_value", ) -def test_update_backup_use_cached_wrapped_rpc(): +def test_create_backup_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -8910,16 +9548,16 @@ def test_update_backup_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.update_backup in client._transport._wrapped_methods + assert client._transport.create_backup in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.update_backup] = mock_rpc + client._transport._wrapped_methods[client._transport.create_backup] = mock_rpc request = {} - client.update_backup(request) + client.create_backup(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 @@ -8929,7 +9567,7 @@ def test_update_backup_use_cached_wrapped_rpc(): # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.update_backup(request) + client.create_backup(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -8937,7 +9575,7 @@ def test_update_backup_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_update_backup_async_use_cached_wrapped_rpc( +async def test_create_backup_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -8954,7 +9592,7 @@ async def test_update_backup_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.update_backup + client._client._transport.create_backup in client._client._transport._wrapped_methods ) @@ -8962,11 +9600,11 @@ async def test_update_backup_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.update_backup + client._client._transport.create_backup ] = mock_rpc request = {} - await client.update_backup(request) + await client.create_backup(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 @@ -8976,7 +9614,7 @@ async def test_update_backup_async_use_cached_wrapped_rpc( # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - await client.update_backup(request) + await client.create_backup(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -8984,8 +9622,8 @@ async def test_update_backup_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_update_backup_async( - transport: str = "grpc_asyncio", request_type=service.UpdateBackupRequest +async def test_create_backup_async( + transport: str = "grpc_asyncio", request_type=service.CreateBackupRequest ): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), @@ -8997,17 +9635,17 @@ async def test_update_backup_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_backup), "__call__") as call: + with mock.patch.object(type(client.transport.create_backup), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.update_backup(request) + response = await client.create_backup(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = service.UpdateBackupRequest() + request = service.CreateBackupRequest() assert args[0] == request # Establish that the response is the type that we expect. 
@@ -9015,25 +9653,25 @@ async def test_update_backup_async( @pytest.mark.asyncio -async def test_update_backup_async_from_dict(): - await test_update_backup_async(request_type=dict) +async def test_create_backup_async_from_dict(): + await test_create_backup_async(request_type=dict) -def test_update_backup_field_headers(): +def test_create_backup_field_headers(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.UpdateBackupRequest() + request = service.CreateBackupRequest() - request.backup.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_backup), "__call__") as call: + with mock.patch.object(type(client.transport.create_backup), "__call__") as call: call.return_value = operations_pb2.Operation(name="operations/op") - client.update_backup(request) + client.create_backup(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -9044,28 +9682,28 @@ def test_update_backup_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "backup.name=name_value", + "parent=parent_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_update_backup_field_headers_async(): +async def test_create_backup_field_headers_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.UpdateBackupRequest() + request = service.CreateBackupRequest() - request.backup.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.update_backup), "__call__") as call: + with mock.patch.object(type(client.transport.create_backup), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) - await client.update_backup(request) + await client.create_backup(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -9076,39 +9714,43 @@ async def test_update_backup_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "backup.name=name_value", + "parent=parent_value", ) in kw["metadata"] -def test_update_backup_flattened(): +def test_create_backup_flattened(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_backup), "__call__") as call: + with mock.patch.object(type(client.transport.create_backup), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.update_backup( + client.create_backup( + parent="parent_value", backup=resources.Backup(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + backup_id="backup_id_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val arg = args[0].backup mock_val = resources.Backup(name="name_value") assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + arg = args[0].backup_id + mock_val = "backup_id_value" assert arg == mock_val -def test_update_backup_flattened_error(): +def test_create_backup_flattened_error(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -9116,21 +9758,22 @@ def test_update_backup_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_backup( - service.UpdateBackupRequest(), + client.create_backup( + service.CreateBackupRequest(), + parent="parent_value", backup=resources.Backup(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + backup_id="backup_id_value", ) @pytest.mark.asyncio -async def test_update_backup_flattened_async(): +async def test_create_backup_flattened_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_backup), "__call__") as call: + with mock.patch.object(type(client.transport.create_backup), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") @@ -9139,25 +9782,29 @@ async def test_update_backup_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.update_backup( + response = await client.create_backup( + parent="parent_value", backup=resources.Backup(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + backup_id="backup_id_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val arg = args[0].backup mock_val = resources.Backup(name="name_value") assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + arg = args[0].backup_id + mock_val = "backup_id_value" assert arg == mock_val @pytest.mark.asyncio -async def test_update_backup_flattened_error_async(): +async def test_create_backup_flattened_error_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) @@ -9165,21 +9812,22 @@ async def test_update_backup_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.update_backup( - service.UpdateBackupRequest(), + await client.create_backup( + service.CreateBackupRequest(), + parent="parent_value", backup=resources.Backup(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + backup_id="backup_id_value", ) @pytest.mark.parametrize( "request_type", [ - service.DeleteBackupRequest, + service.UpdateBackupRequest, dict, ], ) -def test_delete_backup(request_type, transport: str = "grpc"): +def test_update_backup(request_type, transport: str = "grpc"): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -9190,22 +9838,22 @@ def test_delete_backup(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: + with mock.patch.object(type(client.transport.update_backup), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.delete_backup(request) + response = client.update_backup(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = service.DeleteBackupRequest() + request = service.UpdateBackupRequest() assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) -def test_delete_backup_non_empty_request_with_auto_populated_field(): +def test_update_backup_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AlloyDBAdminClient( @@ -9216,28 +9864,24 @@ def test_delete_backup_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = service.DeleteBackupRequest( - name="name_value", + request = service.UpdateBackupRequest( request_id="request_id_value", - etag="etag_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: + with mock.patch.object(type(client.transport.update_backup), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.delete_backup(request=request) + client.update_backup(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.DeleteBackupRequest( - name="name_value", + assert args[0] == service.UpdateBackupRequest( request_id="request_id_value", - etag="etag_value", ) -def test_delete_backup_use_cached_wrapped_rpc(): +def test_update_backup_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -9251,16 +9895,16 @@ def test_delete_backup_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.delete_backup in client._transport._wrapped_methods + assert client._transport.update_backup in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.delete_backup] = mock_rpc + client._transport._wrapped_methods[client._transport.update_backup] = mock_rpc request = {} - client.delete_backup(request) + client.update_backup(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 @@ -9270,7 +9914,7 @@ def test_delete_backup_use_cached_wrapped_rpc(): # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.delete_backup(request) + client.update_backup(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -9278,7 +9922,7 @@ def test_delete_backup_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_delete_backup_async_use_cached_wrapped_rpc( +async def test_update_backup_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -9295,7 +9939,7 @@ async def test_delete_backup_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.delete_backup + client._client._transport.update_backup in client._client._transport._wrapped_methods ) @@ -9303,11 +9947,11 @@ async def test_delete_backup_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.delete_backup + client._client._transport.update_backup ] = mock_rpc request = {} - await client.delete_backup(request) + await client.update_backup(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 @@ -9317,7 +9961,7 @@ async def test_delete_backup_async_use_cached_wrapped_rpc( # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - await client.delete_backup(request) + await client.update_backup(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -9325,8 +9969,8 @@ async def test_delete_backup_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_delete_backup_async( - transport: str = "grpc_asyncio", request_type=service.DeleteBackupRequest +async def test_update_backup_async( + transport: str = "grpc_asyncio", request_type=service.UpdateBackupRequest ): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), @@ -9338,17 +9982,17 @@ async def test_delete_backup_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: + with mock.patch.object(type(client.transport.update_backup), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.delete_backup(request) + response = await client.update_backup(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = service.DeleteBackupRequest() + request = service.UpdateBackupRequest() assert args[0] == request # Establish that the response is the type that we expect. 
@@ -9356,25 +10000,25 @@ async def test_delete_backup_async( @pytest.mark.asyncio -async def test_delete_backup_async_from_dict(): - await test_delete_backup_async(request_type=dict) +async def test_update_backup_async_from_dict(): + await test_update_backup_async(request_type=dict) -def test_delete_backup_field_headers(): +def test_update_backup_field_headers(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.DeleteBackupRequest() + request = service.UpdateBackupRequest() - request.name = "name_value" + request.backup.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: + with mock.patch.object(type(client.transport.update_backup), "__call__") as call: call.return_value = operations_pb2.Operation(name="operations/op") - client.delete_backup(request) + client.update_backup(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -9385,28 +10029,28 @@ def test_delete_backup_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "backup.name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_delete_backup_field_headers_async(): +async def test_update_backup_field_headers_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.DeleteBackupRequest() + request = service.UpdateBackupRequest() - request.name = "name_value" + request.backup.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: + with mock.patch.object(type(client.transport.update_backup), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) - await client.delete_backup(request) + await client.update_backup(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -9417,35 +10061,39 @@ async def test_delete_backup_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "backup.name=name_value", ) in kw["metadata"] -def test_delete_backup_flattened(): +def test_update_backup_flattened(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: + with mock.patch.object(type(client.transport.update_backup), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.delete_backup( - name="name_value", + client.update_backup( + backup=resources.Backup(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].backup + mock_val = resources.Backup(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val -def test_delete_backup_flattened_error(): +def test_update_backup_flattened_error(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -9453,20 +10101,21 @@ def test_delete_backup_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete_backup( - service.DeleteBackupRequest(), - name="name_value", + client.update_backup( + service.UpdateBackupRequest(), + backup=resources.Backup(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio -async def test_delete_backup_flattened_async(): +async def test_update_backup_flattened_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: + with mock.patch.object(type(client.transport.update_backup), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") @@ -9475,21 +10124,25 @@ async def test_delete_backup_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.delete_backup( - name="name_value", + response = await client.update_backup( + backup=resources.Backup(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].backup + mock_val = resources.Backup(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val @pytest.mark.asyncio -async def test_delete_backup_flattened_error_async(): +async def test_update_backup_flattened_error_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) @@ -9497,20 +10150,21 @@ async def test_delete_backup_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.delete_backup( - service.DeleteBackupRequest(), - name="name_value", + await client.update_backup( + service.UpdateBackupRequest(), + backup=resources.Backup(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.parametrize( "request_type", [ - service.ListSupportedDatabaseFlagsRequest, + service.DeleteBackupRequest, dict, ], ) -def test_list_supported_database_flags(request_type, transport: str = "grpc"): +def test_delete_backup(request_type, transport: str = "grpc"): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -9521,27 +10175,22 @@ def test_list_supported_database_flags(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_supported_database_flags), "__call__" - ) as call: + with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = service.ListSupportedDatabaseFlagsResponse( - next_page_token="next_page_token_value", - ) - response = client.list_supported_database_flags(request) + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_backup(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = service.ListSupportedDatabaseFlagsRequest() + request = service.DeleteBackupRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListSupportedDatabaseFlagsPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, future.Future) -def test_list_supported_database_flags_non_empty_request_with_auto_populated_field(): +def test_delete_backup_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AlloyDBAdminClient( @@ -9552,28 +10201,28 @@ def test_list_supported_database_flags_non_empty_request_with_auto_populated_fie # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = service.ListSupportedDatabaseFlagsRequest( - parent="parent_value", - page_token="page_token_value", + request = service.DeleteBackupRequest( + name="name_value", + request_id="request_id_value", + etag="etag_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_supported_database_flags), "__call__" - ) as call: + with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.list_supported_database_flags(request=request) + client.delete_backup(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.ListSupportedDatabaseFlagsRequest( - parent="parent_value", - page_token="page_token_value", + assert args[0] == service.DeleteBackupRequest( + name="name_value", + request_id="request_id_value", + etag="etag_value", ) -def test_list_supported_database_flags_use_cached_wrapped_rpc(): +def test_delete_backup_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -9587,26 +10236,26 @@ def test_list_supported_database_flags_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.list_supported_database_flags - in client._transport._wrapped_methods - ) + assert client._transport.delete_backup in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.list_supported_database_flags - ] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_backup] = mock_rpc request = {} - client.list_supported_database_flags(request) + client.delete_backup(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_supported_database_flags(request) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_backup(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -9614,7 +10263,7 @@ def test_list_supported_database_flags_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_list_supported_database_flags_async_use_cached_wrapped_rpc( +async def test_delete_backup_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -9631,7 +10280,7 @@ async def test_list_supported_database_flags_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.list_supported_database_flags + client._client._transport.delete_backup in client._client._transport._wrapped_methods ) @@ -9639,16 +10288,21 @@ async def test_list_supported_database_flags_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.list_supported_database_flags + client._client._transport.delete_backup ] = mock_rpc request = {} - await client.list_supported_database_flags(request) + await client.delete_backup(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - await client.list_supported_database_flags(request) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_backup(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -9656,9 +10310,8 @@ async def test_list_supported_database_flags_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_list_supported_database_flags_async( - transport: str = "grpc_asyncio", - request_type=service.ListSupportedDatabaseFlagsRequest, +async def test_delete_backup_async( + transport: str = "grpc_asyncio", request_type=service.DeleteBackupRequest ): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), @@ -9670,50 +10323,43 @@ async def test_list_supported_database_flags_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_supported_database_flags), "__call__" - ) as call: + with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - service.ListSupportedDatabaseFlagsResponse( - next_page_token="next_page_token_value", - ) + operations_pb2.Operation(name="operations/spam") ) - response = await client.list_supported_database_flags(request) + response = await client.delete_backup(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = service.ListSupportedDatabaseFlagsRequest() + request = service.DeleteBackupRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListSupportedDatabaseFlagsAsyncPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, future.Future) @pytest.mark.asyncio -async def test_list_supported_database_flags_async_from_dict(): - await test_list_supported_database_flags_async(request_type=dict) +async def test_delete_backup_async_from_dict(): + await test_delete_backup_async(request_type=dict) -def test_list_supported_database_flags_field_headers(): +def test_delete_backup_field_headers(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.ListSupportedDatabaseFlagsRequest() + request = service.DeleteBackupRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_supported_database_flags), "__call__" - ) as call: - call.return_value = service.ListSupportedDatabaseFlagsResponse() - client.list_supported_database_flags(request) + with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_backup(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -9724,30 +10370,28 @@ def test_list_supported_database_flags_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_list_supported_database_flags_field_headers_async(): +async def test_delete_backup_field_headers_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. 
Set these to a non-empty value. - request = service.ListSupportedDatabaseFlagsRequest() + request = service.DeleteBackupRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_supported_database_flags), "__call__" - ) as call: + with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - service.ListSupportedDatabaseFlagsResponse() + operations_pb2.Operation(name="operations/op") ) - await client.list_supported_database_flags(request) + await client.delete_backup(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -9758,37 +10402,35 @@ async def test_list_supported_database_flags_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] -def test_list_supported_database_flags_flattened(): +def test_delete_backup_flattened(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_supported_database_flags), "__call__" - ) as call: + with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = service.ListSupportedDatabaseFlagsResponse() + call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_supported_database_flags( - parent="parent_value", + client.delete_backup( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val -def test_list_supported_database_flags_flattened_error(): +def test_delete_backup_flattened_error(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -9796,45 +10438,43 @@ def test_list_supported_database_flags_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_supported_database_flags( - service.ListSupportedDatabaseFlagsRequest(), - parent="parent_value", + client.delete_backup( + service.DeleteBackupRequest(), + name="name_value", ) @pytest.mark.asyncio -async def test_list_supported_database_flags_flattened_async(): +async def test_delete_backup_flattened_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_supported_database_flags), "__call__" - ) as call: + with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = service.ListSupportedDatabaseFlagsResponse() + call.return_value = operations_pb2.Operation(name="operations/op") call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - service.ListSupportedDatabaseFlagsResponse() + operations_pb2.Operation(name="operations/spam") ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_supported_database_flags( - parent="parent_value", + response = await client.delete_backup( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio -async def test_list_supported_database_flags_flattened_error_async(): +async def test_delete_backup_flattened_error_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) @@ -9842,224 +10482,20 @@ async def test_list_supported_database_flags_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.list_supported_database_flags( - service.ListSupportedDatabaseFlagsRequest(), - parent="parent_value", - ) - - -def test_list_supported_database_flags_pager(transport_name: str = "grpc"): - client = AlloyDBAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_supported_database_flags), "__call__" - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - service.ListSupportedDatabaseFlagsResponse( - supported_database_flags=[ - resources.SupportedDatabaseFlag(), - resources.SupportedDatabaseFlag(), - resources.SupportedDatabaseFlag(), - ], - next_page_token="abc", - ), - service.ListSupportedDatabaseFlagsResponse( - supported_database_flags=[], - next_page_token="def", - ), - service.ListSupportedDatabaseFlagsResponse( - supported_database_flags=[ - resources.SupportedDatabaseFlag(), - ], - next_page_token="ghi", - ), - service.ListSupportedDatabaseFlagsResponse( - supported_database_flags=[ - resources.SupportedDatabaseFlag(), - resources.SupportedDatabaseFlag(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), - ) - pager = client.list_supported_database_flags( - request={}, retry=retry, timeout=timeout - ) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, resources.SupportedDatabaseFlag) for i in results) - - -def test_list_supported_database_flags_pages(transport_name: str = "grpc"): - client = AlloyDBAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_supported_database_flags), "__call__" - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - service.ListSupportedDatabaseFlagsResponse( - supported_database_flags=[ - resources.SupportedDatabaseFlag(), - resources.SupportedDatabaseFlag(), - resources.SupportedDatabaseFlag(), - ], - next_page_token="abc", - ), - service.ListSupportedDatabaseFlagsResponse( - supported_database_flags=[], - next_page_token="def", - ), - service.ListSupportedDatabaseFlagsResponse( - supported_database_flags=[ - resources.SupportedDatabaseFlag(), - ], - next_page_token="ghi", - ), - service.ListSupportedDatabaseFlagsResponse( - supported_database_flags=[ - resources.SupportedDatabaseFlag(), - resources.SupportedDatabaseFlag(), - ], - ), - RuntimeError, - ) - pages = list(client.list_supported_database_flags(request={}).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.asyncio -async def test_list_supported_database_flags_async_pager(): - client = AlloyDBAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_supported_database_flags), - "__call__", - new_callable=mock.AsyncMock, - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - service.ListSupportedDatabaseFlagsResponse( - supported_database_flags=[ - resources.SupportedDatabaseFlag(), - resources.SupportedDatabaseFlag(), - resources.SupportedDatabaseFlag(), - ], - next_page_token="abc", - ), - service.ListSupportedDatabaseFlagsResponse( - supported_database_flags=[], - next_page_token="def", - ), - service.ListSupportedDatabaseFlagsResponse( - supported_database_flags=[ - resources.SupportedDatabaseFlag(), - ], - next_page_token="ghi", - ), - service.ListSupportedDatabaseFlagsResponse( - supported_database_flags=[ - resources.SupportedDatabaseFlag(), - resources.SupportedDatabaseFlag(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_supported_database_flags( - request={}, - ) - assert async_pager.next_page_token == "abc" - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, resources.SupportedDatabaseFlag) for i in responses) - - -@pytest.mark.asyncio -async def test_list_supported_database_flags_async_pages(): - client = AlloyDBAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_supported_database_flags), - "__call__", - new_callable=mock.AsyncMock, - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - service.ListSupportedDatabaseFlagsResponse( - supported_database_flags=[ - resources.SupportedDatabaseFlag(), - resources.SupportedDatabaseFlag(), - resources.SupportedDatabaseFlag(), - ], - next_page_token="abc", - ), - service.ListSupportedDatabaseFlagsResponse( - supported_database_flags=[], - next_page_token="def", - ), - service.ListSupportedDatabaseFlagsResponse( - supported_database_flags=[ - resources.SupportedDatabaseFlag(), - ], - next_page_token="ghi", - ), - service.ListSupportedDatabaseFlagsResponse( - supported_database_flags=[ - resources.SupportedDatabaseFlag(), - resources.SupportedDatabaseFlag(), - ], - ), - RuntimeError, + await client.delete_backup( + service.DeleteBackupRequest(), + name="name_value", ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_supported_database_flags(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token @pytest.mark.parametrize( "request_type", [ - service.GenerateClientCertificateRequest, + service.ListSupportedDatabaseFlagsRequest, dict, ], ) -def test_generate_client_certificate(request_type, transport: str = "grpc"): +def test_list_supported_database_flags(request_type, transport: str = "grpc"): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -10071,30 +10507,26 @@ def test_generate_client_certificate(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.generate_client_certificate), "__call__" + type(client.transport.list_supported_database_flags), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = service.GenerateClientCertificateResponse( - pem_certificate="pem_certificate_value", - pem_certificate_chain=["pem_certificate_chain_value"], - ca_cert="ca_cert_value", + call.return_value = service.ListSupportedDatabaseFlagsResponse( + next_page_token="next_page_token_value", ) - response = client.generate_client_certificate(request) + response = client.list_supported_database_flags(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = service.GenerateClientCertificateRequest() + request = service.ListSupportedDatabaseFlagsRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, service.GenerateClientCertificateResponse) - assert response.pem_certificate == "pem_certificate_value" - assert response.pem_certificate_chain == ["pem_certificate_chain_value"] - assert response.ca_cert == "ca_cert_value" + assert isinstance(response, pagers.ListSupportedDatabaseFlagsPager) + assert response.next_page_token == "next_page_token_value" -def test_generate_client_certificate_non_empty_request_with_auto_populated_field(): +def test_list_supported_database_flags_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AlloyDBAdminClient( @@ -10105,32 +10537,28 @@ def test_generate_client_certificate_non_empty_request_with_auto_populated_field # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. 
- request = service.GenerateClientCertificateRequest( + request = service.ListSupportedDatabaseFlagsRequest( parent="parent_value", - request_id="request_id_value", - pem_csr="pem_csr_value", - public_key="public_key_value", + page_token="page_token_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.generate_client_certificate), "__call__" + type(client.transport.list_supported_database_flags), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.generate_client_certificate(request=request) + client.list_supported_database_flags(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.GenerateClientCertificateRequest( + assert args[0] == service.ListSupportedDatabaseFlagsRequest( parent="parent_value", - request_id="request_id_value", - pem_csr="pem_csr_value", - public_key="public_key_value", + page_token="page_token_value", ) -def test_generate_client_certificate_use_cached_wrapped_rpc(): +def test_list_supported_database_flags_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -10145,7 +10573,7 @@ def test_generate_client_certificate_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.generate_client_certificate + client._transport.list_supported_database_flags in client._transport._wrapped_methods ) @@ -10155,15 +10583,15 @@ def test_generate_client_certificate_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. 
) client._transport._wrapped_methods[ - client._transport.generate_client_certificate + client._transport.list_supported_database_flags ] = mock_rpc request = {} - client.generate_client_certificate(request) + client.list_supported_database_flags(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.generate_client_certificate(request) + client.list_supported_database_flags(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -10171,7 +10599,7 @@ def test_generate_client_certificate_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_generate_client_certificate_async_use_cached_wrapped_rpc( +async def test_list_supported_database_flags_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -10188,7 +10616,7 @@ async def test_generate_client_certificate_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.generate_client_certificate + client._client._transport.list_supported_database_flags in client._client._transport._wrapped_methods ) @@ -10196,16 +10624,16 @@ async def test_generate_client_certificate_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.generate_client_certificate + client._client._transport.list_supported_database_flags ] = mock_rpc request = {} - await client.generate_client_certificate(request) + await client.list_supported_database_flags(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - await client.generate_client_certificate(request) + await client.list_supported_database_flags(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -10213,9 +10641,9 @@ async def test_generate_client_certificate_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_generate_client_certificate_async( +async def test_list_supported_database_flags_async( transport: str = "grpc_asyncio", - request_type=service.GenerateClientCertificateRequest, + request_type=service.ListSupportedDatabaseFlagsRequest, ): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), @@ -10228,53 +10656,49 @@ async def test_generate_client_certificate_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.generate_client_certificate), "__call__" + type(client.transport.list_supported_database_flags), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - service.GenerateClientCertificateResponse( - pem_certificate="pem_certificate_value", - pem_certificate_chain=["pem_certificate_chain_value"], - ca_cert="ca_cert_value", + service.ListSupportedDatabaseFlagsResponse( + next_page_token="next_page_token_value", ) ) - response = await client.generate_client_certificate(request) + response = await client.list_supported_database_flags(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = service.GenerateClientCertificateRequest() + request = service.ListSupportedDatabaseFlagsRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, service.GenerateClientCertificateResponse) - assert response.pem_certificate == "pem_certificate_value" - assert response.pem_certificate_chain == ["pem_certificate_chain_value"] - assert response.ca_cert == "ca_cert_value" + assert isinstance(response, pagers.ListSupportedDatabaseFlagsAsyncPager) + assert response.next_page_token == "next_page_token_value" @pytest.mark.asyncio -async def test_generate_client_certificate_async_from_dict(): - await test_generate_client_certificate_async(request_type=dict) +async def test_list_supported_database_flags_async_from_dict(): + await test_list_supported_database_flags_async(request_type=dict) -def test_generate_client_certificate_field_headers(): +def test_list_supported_database_flags_field_headers(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.GenerateClientCertificateRequest() + request = service.ListSupportedDatabaseFlagsRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.generate_client_certificate), "__call__" + type(client.transport.list_supported_database_flags), "__call__" ) as call: - call.return_value = service.GenerateClientCertificateResponse() - client.generate_client_certificate(request) + call.return_value = service.ListSupportedDatabaseFlagsResponse() + client.list_supported_database_flags(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -10290,25 +10714,25 @@ def test_generate_client_certificate_field_headers(): @pytest.mark.asyncio -async def test_generate_client_certificate_field_headers_async(): +async def test_list_supported_database_flags_field_headers_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.GenerateClientCertificateRequest() + request = service.ListSupportedDatabaseFlagsRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.generate_client_certificate), "__call__" + type(client.transport.list_supported_database_flags), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - service.GenerateClientCertificateResponse() + service.ListSupportedDatabaseFlagsResponse() ) - await client.generate_client_certificate(request) + await client.list_supported_database_flags(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -10323,20 +10747,20 @@ async def test_generate_client_certificate_field_headers_async(): ) in kw["metadata"] -def test_generate_client_certificate_flattened(): +def test_list_supported_database_flags_flattened(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.generate_client_certificate), "__call__" + type(client.transport.list_supported_database_flags), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = service.GenerateClientCertificateResponse() + call.return_value = service.ListSupportedDatabaseFlagsResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.generate_client_certificate( + client.list_supported_database_flags( parent="parent_value", ) @@ -10349,7 +10773,7 @@ def test_generate_client_certificate_flattened(): assert arg == mock_val -def test_generate_client_certificate_flattened_error(): +def test_list_supported_database_flags_flattened_error(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -10357,31 +10781,31 @@ def test_generate_client_certificate_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.generate_client_certificate( - service.GenerateClientCertificateRequest(), + client.list_supported_database_flags( + service.ListSupportedDatabaseFlagsRequest(), parent="parent_value", ) @pytest.mark.asyncio -async def test_generate_client_certificate_flattened_async(): +async def test_list_supported_database_flags_flattened_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.generate_client_certificate), "__call__" + type(client.transport.list_supported_database_flags), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = service.GenerateClientCertificateResponse() + call.return_value = service.ListSupportedDatabaseFlagsResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - service.GenerateClientCertificateResponse() + service.ListSupportedDatabaseFlagsResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.generate_client_certificate( + response = await client.list_supported_database_flags( parent="parent_value", ) @@ -10395,7 +10819,7 @@ async def test_generate_client_certificate_flattened_async(): @pytest.mark.asyncio -async def test_generate_client_certificate_flattened_error_async(): +async def test_list_supported_database_flags_flattened_error_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) @@ -10403,125 +10827,328 @@ async def test_generate_client_certificate_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.generate_client_certificate( - service.GenerateClientCertificateRequest(), + await client.list_supported_database_flags( + service.ListSupportedDatabaseFlagsRequest(), parent="parent_value", ) -@pytest.mark.parametrize( - "request_type", - [ - service.GetConnectionInfoRequest, - dict, - ], -) -def test_get_connection_info(request_type, transport: str = "grpc"): +def test_list_supported_database_flags_pager(transport_name: str = "grpc"): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport=transport_name, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_connection_info), "__call__" + type(client.transport.list_supported_database_flags), "__call__" ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = resources.ConnectionInfo( - name="name_value", - ip_address="ip_address_value", - public_ip_address="public_ip_address_value", - pem_certificate_chain=["pem_certificate_chain_value"], - instance_uid="instance_uid_value", - psc_dns_name="psc_dns_name_value", + # Set the response to a series of pages. + call.side_effect = ( + service.ListSupportedDatabaseFlagsResponse( + supported_database_flags=[ + resources.SupportedDatabaseFlag(), + resources.SupportedDatabaseFlag(), + resources.SupportedDatabaseFlag(), + ], + next_page_token="abc", + ), + service.ListSupportedDatabaseFlagsResponse( + supported_database_flags=[], + next_page_token="def", + ), + service.ListSupportedDatabaseFlagsResponse( + supported_database_flags=[ + resources.SupportedDatabaseFlag(), + ], + next_page_token="ghi", + ), + service.ListSupportedDatabaseFlagsResponse( + supported_database_flags=[ + resources.SupportedDatabaseFlag(), + resources.SupportedDatabaseFlag(), + ], + ), + RuntimeError, ) - response = client.get_connection_info(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = service.GetConnectionInfoRequest() - assert args[0] == request + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_supported_database_flags( + request={}, retry=retry, timeout=timeout + ) - # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.ConnectionInfo) - assert response.name == "name_value" - assert response.ip_address == "ip_address_value" - assert response.public_ip_address == "public_ip_address_value" - assert response.pem_certificate_chain == ["pem_certificate_chain_value"] - assert response.instance_uid == "instance_uid_value" - assert response.psc_dns_name == "psc_dns_name_value" + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.SupportedDatabaseFlag) for i in results) -def test_get_connection_info_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. +def test_list_supported_database_flags_pages(transport_name: str = "grpc"): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = service.GetConnectionInfoRequest( - parent="parent_value", - request_id="request_id_value", + transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_connection_info), "__call__" + type(client.transport.list_supported_database_flags), "__call__" ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.get_connection_info(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.GetConnectionInfoRequest( - parent="parent_value", - request_id="request_id_value", - ) - - -def test_get_connection_info_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AlloyDBAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + # Set the response to a series of pages. + call.side_effect = ( + service.ListSupportedDatabaseFlagsResponse( + supported_database_flags=[ + resources.SupportedDatabaseFlag(), + resources.SupportedDatabaseFlag(), + resources.SupportedDatabaseFlag(), + ], + next_page_token="abc", + ), + service.ListSupportedDatabaseFlagsResponse( + supported_database_flags=[], + next_page_token="def", + ), + service.ListSupportedDatabaseFlagsResponse( + supported_database_flags=[ + resources.SupportedDatabaseFlag(), + ], + next_page_token="ghi", + ), + service.ListSupportedDatabaseFlagsResponse( + supported_database_flags=[ + resources.SupportedDatabaseFlag(), + resources.SupportedDatabaseFlag(), + ], + ), + RuntimeError, ) + pages = list(client.list_supported_database_flags(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - # Ensure method has been cached - assert ( - client._transport.get_connection_info in client._transport._wrapped_methods - ) +@pytest.mark.asyncio +async def test_list_supported_database_flags_async_pager(): + client = AlloyDBAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - 
"foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.get_connection_info - ] = mock_rpc - request = {} - client.get_connection_info(request) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_supported_database_flags), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListSupportedDatabaseFlagsResponse( + supported_database_flags=[ + resources.SupportedDatabaseFlag(), + resources.SupportedDatabaseFlag(), + resources.SupportedDatabaseFlag(), + ], + next_page_token="abc", + ), + service.ListSupportedDatabaseFlagsResponse( + supported_database_flags=[], + next_page_token="def", + ), + service.ListSupportedDatabaseFlagsResponse( + supported_database_flags=[ + resources.SupportedDatabaseFlag(), + ], + next_page_token="ghi", + ), + service.ListSupportedDatabaseFlagsResponse( + supported_database_flags=[ + resources.SupportedDatabaseFlag(), + resources.SupportedDatabaseFlag(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_supported_database_flags( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, resources.SupportedDatabaseFlag) for i in responses) + + +@pytest.mark.asyncio +async def test_list_supported_database_flags_async_pages(): + client = AlloyDBAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_supported_database_flags), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + service.ListSupportedDatabaseFlagsResponse( + supported_database_flags=[ + resources.SupportedDatabaseFlag(), + resources.SupportedDatabaseFlag(), + resources.SupportedDatabaseFlag(), + ], + next_page_token="abc", + ), + service.ListSupportedDatabaseFlagsResponse( + supported_database_flags=[], + next_page_token="def", + ), + service.ListSupportedDatabaseFlagsResponse( + supported_database_flags=[ + resources.SupportedDatabaseFlag(), + ], + next_page_token="ghi", + ), + service.ListSupportedDatabaseFlagsResponse( + supported_database_flags=[ + resources.SupportedDatabaseFlag(), + resources.SupportedDatabaseFlag(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_supported_database_flags(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + service.GenerateClientCertificateRequest, + dict, + ], +) +def test_generate_client_certificate(request_type, transport: str = "grpc"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.generate_client_certificate), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = service.GenerateClientCertificateResponse( + pem_certificate="pem_certificate_value", + pem_certificate_chain=["pem_certificate_chain_value"], + ca_cert="ca_cert_value", + ) + response = client.generate_client_certificate(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = service.GenerateClientCertificateRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, service.GenerateClientCertificateResponse) + assert response.pem_certificate == "pem_certificate_value" + assert response.pem_certificate_chain == ["pem_certificate_chain_value"] + assert response.ca_cert == "ca_cert_value" + + +def test_generate_client_certificate_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service.GenerateClientCertificateRequest( + parent="parent_value", + request_id="request_id_value", + pem_csr="pem_csr_value", + public_key="public_key_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.generate_client_certificate), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.generate_client_certificate(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.GenerateClientCertificateRequest( + parent="parent_value", + request_id="request_id_value", + pem_csr="pem_csr_value", + public_key="public_key_value", + ) + + +def test_generate_client_certificate_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.generate_client_certificate + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.generate_client_certificate + ] = mock_rpc + request = {} + client.generate_client_certificate(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_connection_info(request) + client.generate_client_certificate(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -10529,7 +11156,7 @@ def test_get_connection_info_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_get_connection_info_async_use_cached_wrapped_rpc( +async def test_generate_client_certificate_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -10546,7 +11173,7 @@ async def test_get_connection_info_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.get_connection_info + client._client._transport.generate_client_certificate in client._client._transport._wrapped_methods ) @@ -10554,16 +11181,16 @@ async def test_get_connection_info_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.get_connection_info + client._client._transport.generate_client_certificate ] = mock_rpc request = {} - await client.get_connection_info(request) + await client.generate_client_certificate(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - await client.get_connection_info(request) + await client.generate_client_certificate(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -10571,8 +11198,9 @@ async def test_get_connection_info_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_get_connection_info_async( - transport: str = "grpc_asyncio", request_type=service.GetConnectionInfoRequest +async def test_generate_client_certificate_async( + transport: str = "grpc_asyncio", + request_type=service.GenerateClientCertificateRequest, ): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), @@ -10585,59 +11213,53 @@ async def test_get_connection_info_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_connection_info), "__call__" + type(client.transport.generate_client_certificate), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.ConnectionInfo( - name="name_value", - ip_address="ip_address_value", - public_ip_address="public_ip_address_value", + service.GenerateClientCertificateResponse( + pem_certificate="pem_certificate_value", pem_certificate_chain=["pem_certificate_chain_value"], - instance_uid="instance_uid_value", - psc_dns_name="psc_dns_name_value", + ca_cert="ca_cert_value", ) ) - response = await client.get_connection_info(request) + response = await client.generate_client_certificate(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = service.GetConnectionInfoRequest() + request = service.GenerateClientCertificateRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.ConnectionInfo) - assert response.name == "name_value" - assert response.ip_address == "ip_address_value" - assert response.public_ip_address == "public_ip_address_value" + assert isinstance(response, service.GenerateClientCertificateResponse) + assert response.pem_certificate == "pem_certificate_value" assert response.pem_certificate_chain == ["pem_certificate_chain_value"] - assert response.instance_uid == "instance_uid_value" - assert response.psc_dns_name == "psc_dns_name_value" + assert response.ca_cert == "ca_cert_value" @pytest.mark.asyncio -async def test_get_connection_info_async_from_dict(): - await test_get_connection_info_async(request_type=dict) +async def test_generate_client_certificate_async_from_dict(): + await test_generate_client_certificate_async(request_type=dict) -def test_get_connection_info_field_headers(): +def test_generate_client_certificate_field_headers(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.GetConnectionInfoRequest() + request = service.GenerateClientCertificateRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_connection_info), "__call__" + type(client.transport.generate_client_certificate), "__call__" ) as call: - call.return_value = resources.ConnectionInfo() - client.get_connection_info(request) + call.return_value = service.GenerateClientCertificateResponse() + client.generate_client_certificate(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -10653,25 +11275,25 @@ def test_get_connection_info_field_headers(): @pytest.mark.asyncio -async def test_get_connection_info_field_headers_async(): +async def test_generate_client_certificate_field_headers_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.GetConnectionInfoRequest() + request = service.GenerateClientCertificateRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_connection_info), "__call__" + type(client.transport.generate_client_certificate), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.ConnectionInfo() + service.GenerateClientCertificateResponse() ) - await client.get_connection_info(request) + await client.generate_client_certificate(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -10686,20 +11308,20 @@ async def test_get_connection_info_field_headers_async(): ) in kw["metadata"] -def test_get_connection_info_flattened(): +def test_generate_client_certificate_flattened(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_connection_info), "__call__" + type(client.transport.generate_client_certificate), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.ConnectionInfo() + call.return_value = service.GenerateClientCertificateResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- client.get_connection_info( + client.generate_client_certificate( parent="parent_value", ) @@ -10712,7 +11334,7 @@ def test_get_connection_info_flattened(): assert arg == mock_val -def test_get_connection_info_flattened_error(): +def test_generate_client_certificate_flattened_error(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -10720,31 +11342,31 @@ def test_get_connection_info_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_connection_info( - service.GetConnectionInfoRequest(), + client.generate_client_certificate( + service.GenerateClientCertificateRequest(), parent="parent_value", ) @pytest.mark.asyncio -async def test_get_connection_info_flattened_async(): +async def test_generate_client_certificate_flattened_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_connection_info), "__call__" + type(client.transport.generate_client_certificate), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.ConnectionInfo() + call.return_value = service.GenerateClientCertificateResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.ConnectionInfo() + service.GenerateClientCertificateResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.get_connection_info( + response = await client.generate_client_certificate( parent="parent_value", ) @@ -10758,7 +11380,7 @@ async def test_get_connection_info_flattened_async(): @pytest.mark.asyncio -async def test_get_connection_info_flattened_error_async(): +async def test_generate_client_certificate_flattened_error_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) @@ -10766,8 +11388,8 @@ async def test_get_connection_info_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.get_connection_info( - service.GetConnectionInfoRequest(), + await client.generate_client_certificate( + service.GenerateClientCertificateRequest(), parent="parent_value", ) @@ -10775,11 +11397,11 @@ async def test_get_connection_info_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - service.ListUsersRequest, + service.GetConnectionInfoRequest, dict, ], ) -def test_list_users(request_type, transport: str = "grpc"): +def test_get_connection_info(request_type, transport: str = "grpc"): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -10790,27 +11412,37 @@ def test_list_users(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_users), "__call__") as call: + with mock.patch.object( + type(client.transport.get_connection_info), "__call__" + ) as call: # Designate an appropriate return value for the call. 
- call.return_value = service.ListUsersResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], + call.return_value = resources.ConnectionInfo( + name="name_value", + ip_address="ip_address_value", + public_ip_address="public_ip_address_value", + pem_certificate_chain=["pem_certificate_chain_value"], + instance_uid="instance_uid_value", + psc_dns_name="psc_dns_name_value", ) - response = client.list_users(request) + response = client.get_connection_info(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = service.ListUsersRequest() + request = service.GetConnectionInfoRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListUsersPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert isinstance(response, resources.ConnectionInfo) + assert response.name == "name_value" + assert response.ip_address == "ip_address_value" + assert response.public_ip_address == "public_ip_address_value" + assert response.pem_certificate_chain == ["pem_certificate_chain_value"] + assert response.instance_uid == "instance_uid_value" + assert response.psc_dns_name == "psc_dns_name_value" -def test_list_users_non_empty_request_with_auto_populated_field(): +def test_get_connection_info_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AlloyDBAdminClient( @@ -10821,30 +11453,28 @@ def test_list_users_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. 
- request = service.ListUsersRequest( + request = service.GetConnectionInfoRequest( parent="parent_value", - page_token="page_token_value", - filter="filter_value", - order_by="order_by_value", + request_id="request_id_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_users), "__call__") as call: + with mock.patch.object( + type(client.transport.get_connection_info), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.list_users(request=request) + client.get_connection_info(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.ListUsersRequest( + assert args[0] == service.GetConnectionInfoRequest( parent="parent_value", - page_token="page_token_value", - filter="filter_value", - order_by="order_by_value", + request_id="request_id_value", ) -def test_list_users_use_cached_wrapped_rpc(): +def test_get_connection_info_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -10858,21 +11488,25 @@ def test_list_users_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_users in client._transport._wrapped_methods + assert ( + client._transport.get_connection_info in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.list_users] = mock_rpc + client._transport._wrapped_methods[ + client._transport.get_connection_info + ] = mock_rpc request = {} - client.list_users(request) + client.get_connection_info(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_users(request) + client.get_connection_info(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -10880,7 +11514,9 @@ def test_list_users_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_list_users_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_get_connection_info_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -10895,7 +11531,7 @@ async def test_list_users_async_use_cached_wrapped_rpc(transport: str = "grpc_as # Ensure method has been cached assert ( - client._client._transport.list_users + client._client._transport.get_connection_info in client._client._transport._wrapped_methods ) @@ -10903,16 +11539,16 @@ async def test_list_users_async_use_cached_wrapped_rpc(transport: str = "grpc_as mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.list_users + client._client._transport.get_connection_info ] = mock_rpc request = {} - await client.list_users(request) + await client.get_connection_info(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - await client.list_users(request) + await client.get_connection_info(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -10920,8 +11556,8 @@ async def test_list_users_async_use_cached_wrapped_rpc(transport: str = "grpc_as @pytest.mark.asyncio -async def test_list_users_async( - transport: str = "grpc_asyncio", request_type=service.ListUsersRequest +async def test_get_connection_info_async( + transport: str = "grpc_asyncio", request_type=service.GetConnectionInfoRequest ): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), @@ -10933,48 +11569,60 @@ async def test_list_users_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_users), "__call__") as call: + with mock.patch.object( + type(client.transport.get_connection_info), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - service.ListUsersResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], + resources.ConnectionInfo( + name="name_value", + ip_address="ip_address_value", + public_ip_address="public_ip_address_value", + pem_certificate_chain=["pem_certificate_chain_value"], + instance_uid="instance_uid_value", + psc_dns_name="psc_dns_name_value", ) ) - response = await client.list_users(request) + response = await client.get_connection_info(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = service.ListUsersRequest() + request = service.GetConnectionInfoRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListUsersAsyncPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert isinstance(response, resources.ConnectionInfo) + assert response.name == "name_value" + assert response.ip_address == "ip_address_value" + assert response.public_ip_address == "public_ip_address_value" + assert response.pem_certificate_chain == ["pem_certificate_chain_value"] + assert response.instance_uid == "instance_uid_value" + assert response.psc_dns_name == "psc_dns_name_value" @pytest.mark.asyncio -async def test_list_users_async_from_dict(): - await test_list_users_async(request_type=dict) +async def test_get_connection_info_async_from_dict(): + await test_get_connection_info_async(request_type=dict) -def test_list_users_field_headers(): +def test_get_connection_info_field_headers(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.ListUsersRequest() + request = service.GetConnectionInfoRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_users), "__call__") as call: - call.return_value = service.ListUsersResponse() - client.list_users(request) + with mock.patch.object( + type(client.transport.get_connection_info), "__call__" + ) as call: + call.return_value = resources.ConnectionInfo() + client.get_connection_info(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -10990,23 +11638,25 @@ def test_list_users_field_headers(): @pytest.mark.asyncio -async def test_list_users_field_headers_async(): +async def test_get_connection_info_field_headers_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.ListUsersRequest() + request = service.GetConnectionInfoRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_users), "__call__") as call: + with mock.patch.object( + type(client.transport.get_connection_info), "__call__" + ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - service.ListUsersResponse() + resources.ConnectionInfo() ) - await client.list_users(request) + await client.get_connection_info(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -11021,18 +11671,20 @@ async def test_list_users_field_headers_async(): ) in kw["metadata"] -def test_list_users_flattened(): +def test_get_connection_info_flattened(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_users), "__call__") as call: + with mock.patch.object( + type(client.transport.get_connection_info), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = service.ListUsersResponse() + call.return_value = resources.ConnectionInfo() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- client.list_users( + client.get_connection_info( parent="parent_value", ) @@ -11045,7 +11697,7 @@ def test_list_users_flattened(): assert arg == mock_val -def test_list_users_flattened_error(): +def test_get_connection_info_flattened_error(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -11053,29 +11705,31 @@ def test_list_users_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_users( - service.ListUsersRequest(), + client.get_connection_info( + service.GetConnectionInfoRequest(), parent="parent_value", ) @pytest.mark.asyncio -async def test_list_users_flattened_async(): +async def test_get_connection_info_flattened_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_users), "__call__") as call: + with mock.patch.object( + type(client.transport.get_connection_info), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = service.ListUsersResponse() + call.return_value = resources.ConnectionInfo() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - service.ListUsersResponse() + resources.ConnectionInfo() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.list_users( + response = await client.get_connection_info( parent="parent_value", ) @@ -11089,7 +11743,7 @@ async def test_list_users_flattened_async(): @pytest.mark.asyncio -async def test_list_users_flattened_error_async(): +async def test_get_connection_info_flattened_error_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) @@ -11097,249 +11751,51 @@ async def test_list_users_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.list_users( - service.ListUsersRequest(), + await client.get_connection_info( + service.GetConnectionInfoRequest(), parent="parent_value", ) -def test_list_users_pager(transport_name: str = "grpc"): +@pytest.mark.parametrize( + "request_type", + [ + service.ListUsersRequest, + dict, + ], +) +def test_list_users(request_type, transport: str = "grpc"): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, + transport=transport, ) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_users), "__call__") as call: - # Set the response to a series of pages. 
- call.side_effect = ( - service.ListUsersResponse( - users=[ - resources.User(), - resources.User(), - resources.User(), - ], - next_page_token="abc", - ), - service.ListUsersResponse( - users=[], - next_page_token="def", - ), - service.ListUsersResponse( - users=[ - resources.User(), - ], - next_page_token="ghi", - ), - service.ListUsersResponse( - users=[ - resources.User(), - resources.User(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), - ) - pager = client.list_users(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, resources.User) for i in results) - - -def test_list_users_pages(transport_name: str = "grpc"): - client = AlloyDBAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_users), "__call__") as call: - # Set the response to a series of pages. 
- call.side_effect = ( - service.ListUsersResponse( - users=[ - resources.User(), - resources.User(), - resources.User(), - ], - next_page_token="abc", - ), - service.ListUsersResponse( - users=[], - next_page_token="def", - ), - service.ListUsersResponse( - users=[ - resources.User(), - ], - next_page_token="ghi", - ), - service.ListUsersResponse( - users=[ - resources.User(), - resources.User(), - ], - ), - RuntimeError, - ) - pages = list(client.list_users(request={}).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.asyncio -async def test_list_users_async_pager(): - client = AlloyDBAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_users), "__call__", new_callable=mock.AsyncMock - ) as call: - # Set the response to a series of pages. - call.side_effect = ( - service.ListUsersResponse( - users=[ - resources.User(), - resources.User(), - resources.User(), - ], - next_page_token="abc", - ), - service.ListUsersResponse( - users=[], - next_page_token="def", - ), - service.ListUsersResponse( - users=[ - resources.User(), - ], - next_page_token="ghi", - ), - service.ListUsersResponse( - users=[ - resources.User(), - resources.User(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_users( - request={}, - ) - assert async_pager.next_page_token == "abc" - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, resources.User) for i in responses) - - -@pytest.mark.asyncio -async def test_list_users_async_pages(): - client = AlloyDBAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_users), "__call__", new_callable=mock.AsyncMock - ) as call: - # Set the response to a series of pages. - call.side_effect = ( - service.ListUsersResponse( - users=[ - resources.User(), - resources.User(), - resources.User(), - ], - next_page_token="abc", - ), - service.ListUsersResponse( - users=[], - next_page_token="def", - ), - service.ListUsersResponse( - users=[ - resources.User(), - ], - next_page_token="ghi", - ), - service.ListUsersResponse( - users=[ - resources.User(), - resources.User(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_users(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize( - "request_type", - [ - service.GetUserRequest, - dict, - ], -) -def test_get_user(request_type, transport: str = "grpc"): - client = AlloyDBAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_user), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = resources.User( - name="name_value", - password="password_value", - database_roles=["database_roles_value"], - user_type=resources.User.UserType.ALLOYDB_BUILT_IN, + call.return_value = service.ListUsersResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) - response = client.get_user(request) + response = client.list_users(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = service.GetUserRequest() + request = service.ListUsersRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, resources.User) - assert response.name == "name_value" - assert response.password == "password_value" - assert response.database_roles == ["database_roles_value"] - assert response.user_type == resources.User.UserType.ALLOYDB_BUILT_IN + assert isinstance(response, pagers.ListUsersPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] -def test_get_user_non_empty_request_with_auto_populated_field(): +def test_list_users_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AlloyDBAdminClient( @@ -11350,24 +11806,30 @@ def test_get_user_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = service.GetUserRequest( - name="name_value", + request = service.ListUsersRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_user), "__call__") as call: + with mock.patch.object(type(client.transport.list_users), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.get_user(request=request) + client.list_users(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.GetUserRequest( - name="name_value", + assert args[0] == service.ListUsersRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", ) -def test_get_user_use_cached_wrapped_rpc(): +def test_list_users_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -11381,21 +11843,21 @@ def test_get_user_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_user in client._transport._wrapped_methods + assert client._transport.list_users in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_user] = mock_rpc + client._transport._wrapped_methods[client._transport.list_users] = mock_rpc request = {} - client.get_user(request) + client.list_users(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_user(request) + client.list_users(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -11403,7 +11865,7 @@ def test_get_user_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_get_user_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_list_users_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -11418,7 +11880,7 @@ async def test_get_user_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn # Ensure method has been cached assert ( - client._client._transport.get_user + client._client._transport.list_users in client._client._transport._wrapped_methods ) @@ -11426,16 +11888,16 @@ async def test_get_user_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.get_user + client._client._transport.list_users ] = mock_rpc request = {} - await client.get_user(request) + await client.list_users(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - await client.get_user(request) + await client.list_users(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -11443,8 +11905,8 @@ async def test_get_user_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn @pytest.mark.asyncio -async def test_get_user_async( - transport: str = "grpc_asyncio", request_type=service.GetUserRequest +async def test_list_users_async( + transport: str = "grpc_asyncio", request_type=service.ListUsersRequest ): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), @@ -11456,52 +11918,48 @@ async def test_get_user_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_user), "__call__") as call: + with mock.patch.object(type(client.transport.list_users), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.User( - name="name_value", - password="password_value", - database_roles=["database_roles_value"], - user_type=resources.User.UserType.ALLOYDB_BUILT_IN, + service.ListUsersResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) ) - response = await client.get_user(request) + response = await client.list_users(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = service.GetUserRequest() + request = service.ListUsersRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.User) - assert response.name == "name_value" - assert response.password == "password_value" - assert response.database_roles == ["database_roles_value"] - assert response.user_type == resources.User.UserType.ALLOYDB_BUILT_IN + assert isinstance(response, pagers.ListUsersAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] @pytest.mark.asyncio -async def test_get_user_async_from_dict(): - await test_get_user_async(request_type=dict) +async def test_list_users_async_from_dict(): + await test_list_users_async(request_type=dict) -def test_get_user_field_headers(): +def test_list_users_field_headers(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.GetUserRequest() + request = service.ListUsersRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_user), "__call__") as call: - call.return_value = resources.User() - client.get_user(request) + with mock.patch.object(type(client.transport.list_users), "__call__") as call: + call.return_value = service.ListUsersResponse() + client.list_users(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -11512,26 +11970,28 @@ def test_get_user_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_get_user_field_headers_async(): +async def test_list_users_field_headers_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. 
Set these to a non-empty value. - request = service.GetUserRequest() + request = service.ListUsersRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_user), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.User()) - await client.get_user(request) + with mock.patch.object(type(client.transport.list_users), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListUsersResponse() + ) + await client.list_users(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -11542,35 +12002,35 @@ async def test_get_user_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] -def test_get_user_flattened(): +def test_list_users_flattened(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_user), "__call__") as call: + with mock.patch.object(type(client.transport.list_users), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = resources.User() + call.return_value = service.ListUsersResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_user( - name="name_value", + client.list_users( + parent="parent_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" assert arg == mock_val -def test_get_user_flattened_error(): +def test_list_users_flattened_error(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -11578,41 +12038,43 @@ def test_get_user_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_user( - service.GetUserRequest(), - name="name_value", + client.list_users( + service.ListUsersRequest(), + parent="parent_value", ) @pytest.mark.asyncio -async def test_get_user_flattened_async(): +async def test_list_users_flattened_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_user), "__call__") as call: + with mock.patch.object(type(client.transport.list_users), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = resources.User() + call.return_value = service.ListUsersResponse() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.User()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListUsersResponse() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_user( - name="name_value", + response = await client.list_users( + parent="parent_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" assert arg == mock_val @pytest.mark.asyncio -async def test_get_user_flattened_error_async(): +async def test_list_users_flattened_error_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) @@ -11620,115 +12082,307 @@ async def test_get_user_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.get_user( - service.GetUserRequest(), - name="name_value", + await client.list_users( + service.ListUsersRequest(), + parent="parent_value", ) -@pytest.mark.parametrize( - "request_type", - [ - service.CreateUserRequest, - dict, - ], -) -def test_create_user(request_type, transport: str = "grpc"): +def test_list_users_pager(transport_name: str = "grpc"): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport=transport_name, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_user), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = resources.User( - name="name_value", - password="password_value", - database_roles=["database_roles_value"], - user_type=resources.User.UserType.ALLOYDB_BUILT_IN, + with mock.patch.object(type(client.transport.list_users), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + service.ListUsersResponse( + users=[ + resources.User(), + resources.User(), + resources.User(), + ], + next_page_token="abc", + ), + service.ListUsersResponse( + users=[], + next_page_token="def", + ), + service.ListUsersResponse( + users=[ + resources.User(), + ], + next_page_token="ghi", + ), + service.ListUsersResponse( + users=[ + resources.User(), + resources.User(), + ], + ), + RuntimeError, ) - response = client.create_user(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = service.CreateUserRequest() - assert args[0] == request + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_users(request={}, retry=retry, timeout=timeout) - # Establish that the response is the type that we expect. - assert isinstance(response, resources.User) - assert response.name == "name_value" - assert response.password == "password_value" - assert response.database_roles == ["database_roles_value"] - assert response.user_type == resources.User.UserType.ALLOYDB_BUILT_IN + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.User) for i in results) -def test_create_user_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. 
+ +def test_list_users_pages(transport_name: str = "grpc"): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = service.CreateUserRequest( - parent="parent_value", - user_id="user_id_value", - request_id="request_id_value", + transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_user), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.create_user(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.CreateUserRequest( - parent="parent_value", - user_id="user_id_value", - request_id="request_id_value", - ) - - -def test_create_user_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AlloyDBAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + with mock.patch.object(type(client.transport.list_users), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + service.ListUsersResponse( + users=[ + resources.User(), + resources.User(), + resources.User(), + ], + next_page_token="abc", + ), + service.ListUsersResponse( + users=[], + next_page_token="def", + ), + service.ListUsersResponse( + users=[ + resources.User(), + ], + next_page_token="ghi", + ), + service.ListUsersResponse( + users=[ + resources.User(), + resources.User(), + ], + ), + RuntimeError, ) + pages = list(client.list_users(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_user in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.create_user] = mock_rpc - request = {} - client.create_user(request) - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 +@pytest.mark.asyncio +async def test_list_users_async_pager(): + client = AlloyDBAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) - client.create_user(request) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_users), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + service.ListUsersResponse( + users=[ + resources.User(), + resources.User(), + resources.User(), + ], + next_page_token="abc", + ), + service.ListUsersResponse( + users=[], + next_page_token="def", + ), + service.ListUsersResponse( + users=[ + resources.User(), + ], + next_page_token="ghi", + ), + service.ListUsersResponse( + users=[ + resources.User(), + resources.User(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_users( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, resources.User) for i in responses) + + +@pytest.mark.asyncio +async def test_list_users_async_pages(): + client = AlloyDBAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_users), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + service.ListUsersResponse( + users=[ + resources.User(), + resources.User(), + resources.User(), + ], + next_page_token="abc", + ), + service.ListUsersResponse( + users=[], + next_page_token="def", + ), + service.ListUsersResponse( + users=[ + resources.User(), + ], + next_page_token="ghi", + ), + service.ListUsersResponse( + users=[ + resources.User(), + resources.User(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_users(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + service.GetUserRequest, + dict, + ], +) +def test_get_user(request_type, transport: str = "grpc"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_user), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = resources.User( + name="name_value", + password="password_value", + database_roles=["database_roles_value"], + user_type=resources.User.UserType.ALLOYDB_BUILT_IN, + keep_extra_roles=True, + ) + response = client.get_user(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = service.GetUserRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.User) + assert response.name == "name_value" + assert response.password == "password_value" + assert response.database_roles == ["database_roles_value"] + assert response.user_type == resources.User.UserType.ALLOYDB_BUILT_IN + assert response.keep_extra_roles is True + + +def test_get_user_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service.GetUserRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_user), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.get_user(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetUserRequest( + name="name_value", + ) + + +def test_get_user_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_user in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_user] = mock_rpc + request = {} + client.get_user(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_user(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -11736,9 +12390,7 @@ def test_create_user_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_create_user_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): +async def test_get_user_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -11753,7 +12405,7 @@ async def test_create_user_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.create_user + client._client._transport.get_user in client._client._transport._wrapped_methods ) @@ -11761,16 +12413,16 @@ async def test_create_user_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.create_user + client._client._transport.get_user ] = mock_rpc request = {} - await client.create_user(request) + await client.get_user(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - await client.create_user(request) + await client.get_user(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -11778,8 +12430,8 @@ async def test_create_user_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_create_user_async( - transport: str = "grpc_asyncio", request_type=service.CreateUserRequest +async def test_get_user_async( + transport: str = "grpc_asyncio", request_type=service.GetUserRequest ): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), @@ -11791,7 +12443,7 @@ async def test_create_user_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_user), "__call__") as call: + with mock.patch.object(type(client.transport.get_user), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( resources.User( @@ -11799,14 +12451,15 @@ async def test_create_user_async( password="password_value", database_roles=["database_roles_value"], user_type=resources.User.UserType.ALLOYDB_BUILT_IN, + keep_extra_roles=True, ) ) - response = await client.create_user(request) + response = await client.get_user(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = service.CreateUserRequest() + request = service.GetUserRequest() assert args[0] == request # Establish that the response is the type that we expect. 
@@ -11815,28 +12468,29 @@ async def test_create_user_async( assert response.password == "password_value" assert response.database_roles == ["database_roles_value"] assert response.user_type == resources.User.UserType.ALLOYDB_BUILT_IN + assert response.keep_extra_roles is True @pytest.mark.asyncio -async def test_create_user_async_from_dict(): - await test_create_user_async(request_type=dict) +async def test_get_user_async_from_dict(): + await test_get_user_async(request_type=dict) -def test_create_user_field_headers(): +def test_get_user_field_headers(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.CreateUserRequest() + request = service.GetUserRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_user), "__call__") as call: + with mock.patch.object(type(client.transport.get_user), "__call__") as call: call.return_value = resources.User() - client.create_user(request) + client.get_user(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -11847,26 +12501,26 @@ def test_create_user_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_create_user_field_headers_async(): +async def test_get_user_field_headers_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = service.CreateUserRequest() + request = service.GetUserRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_user), "__call__") as call: + with mock.patch.object(type(client.transport.get_user), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.User()) - await client.create_user(request) + await client.get_user(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -11877,43 +12531,35 @@ async def test_create_user_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] -def test_create_user_flattened(): +def test_get_user_flattened(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_user), "__call__") as call: + with mock.patch.object(type(client.transport.get_user), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = resources.User() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.create_user( - parent="parent_value", - user=resources.User(name="name_value"), - user_id="user_id_value", + client.get_user( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].user - mock_val = resources.User(name="name_value") - assert arg == mock_val - arg = args[0].user_id - mock_val = "user_id_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val -def test_create_user_flattened_error(): +def test_get_user_flattened_error(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -11921,51 +12567,41 @@ def test_create_user_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_user( - service.CreateUserRequest(), - parent="parent_value", - user=resources.User(name="name_value"), - user_id="user_id_value", + client.get_user( + service.GetUserRequest(), + name="name_value", ) @pytest.mark.asyncio -async def test_create_user_flattened_async(): +async def test_get_user_flattened_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_user), "__call__") as call: + with mock.patch.object(type(client.transport.get_user), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = resources.User() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.User()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.create_user( - parent="parent_value", - user=resources.User(name="name_value"), - user_id="user_id_value", + response = await client.get_user( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].user - mock_val = resources.User(name="name_value") - assert arg == mock_val - arg = args[0].user_id - mock_val = "user_id_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio -async def test_create_user_flattened_error_async(): +async def test_get_user_flattened_error_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) @@ -11973,22 +12609,20 @@ async def test_create_user_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.create_user( - service.CreateUserRequest(), - parent="parent_value", - user=resources.User(name="name_value"), - user_id="user_id_value", + await client.get_user( + service.GetUserRequest(), + name="name_value", ) @pytest.mark.parametrize( "request_type", [ - service.UpdateUserRequest, + service.CreateUserRequest, dict, ], ) -def test_update_user(request_type, transport: str = "grpc"): +def test_create_user(request_type, transport: str = "grpc"): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -11999,20 +12633,21 @@ def test_update_user(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_user), "__call__") as call: + with mock.patch.object(type(client.transport.create_user), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = resources.User( name="name_value", password="password_value", database_roles=["database_roles_value"], user_type=resources.User.UserType.ALLOYDB_BUILT_IN, + keep_extra_roles=True, ) - response = client.update_user(request) + response = client.create_user(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = service.UpdateUserRequest() + request = service.CreateUserRequest() assert args[0] == request # Establish that the response is the type that we expect. @@ -12021,9 +12656,10 @@ def test_update_user(request_type, transport: str = "grpc"): assert response.password == "password_value" assert response.database_roles == ["database_roles_value"] assert response.user_type == resources.User.UserType.ALLOYDB_BUILT_IN + assert response.keep_extra_roles is True -def test_update_user_non_empty_request_with_auto_populated_field(): +def test_create_user_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AlloyDBAdminClient( @@ -12034,24 +12670,28 @@ def test_update_user_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = service.UpdateUserRequest( + request = service.CreateUserRequest( + parent="parent_value", + user_id="user_id_value", request_id="request_id_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_user), "__call__") as call: + with mock.patch.object(type(client.transport.create_user), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.update_user(request=request) + client.create_user(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.UpdateUserRequest( + assert args[0] == service.CreateUserRequest( + parent="parent_value", + user_id="user_id_value", request_id="request_id_value", ) -def test_update_user_use_cached_wrapped_rpc(): +def test_create_user_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -12065,21 +12705,21 @@ def test_update_user_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.update_user in client._transport._wrapped_methods + assert client._transport.create_user in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.update_user] = mock_rpc + client._transport._wrapped_methods[client._transport.create_user] = mock_rpc request = {} - client.update_user(request) + client.create_user(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.update_user(request) + client.create_user(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -12087,7 +12727,7 @@ def test_update_user_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_update_user_async_use_cached_wrapped_rpc( +async def test_create_user_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -12104,7 +12744,7 @@ async def test_update_user_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.update_user + client._client._transport.create_user in client._client._transport._wrapped_methods ) @@ -12112,16 +12752,16 @@ async def test_update_user_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.update_user + client._client._transport.create_user ] = mock_rpc request = {} - await client.update_user(request) + await client.create_user(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - await client.update_user(request) + await client.create_user(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -12129,8 +12769,8 @@ async def test_update_user_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_update_user_async( - transport: str = "grpc_asyncio", request_type=service.UpdateUserRequest +async def test_create_user_async( + transport: str = "grpc_asyncio", request_type=service.CreateUserRequest ): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), @@ -12142,7 +12782,7 @@ async def test_update_user_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.update_user), "__call__") as call: + with mock.patch.object(type(client.transport.create_user), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( resources.User( @@ -12150,14 +12790,15 @@ async def test_update_user_async( password="password_value", database_roles=["database_roles_value"], user_type=resources.User.UserType.ALLOYDB_BUILT_IN, + keep_extra_roles=True, ) ) - response = await client.update_user(request) + response = await client.create_user(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = service.UpdateUserRequest() + request = service.CreateUserRequest() assert args[0] == request # Establish that the response is the type that we expect. @@ -12166,28 +12807,29 @@ async def test_update_user_async( assert response.password == "password_value" assert response.database_roles == ["database_roles_value"] assert response.user_type == resources.User.UserType.ALLOYDB_BUILT_IN + assert response.keep_extra_roles is True @pytest.mark.asyncio -async def test_update_user_async_from_dict(): - await test_update_user_async(request_type=dict) +async def test_create_user_async_from_dict(): + await test_create_user_async(request_type=dict) -def test_update_user_field_headers(): +def test_create_user_field_headers(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.UpdateUserRequest() + request = service.CreateUserRequest() - request.user.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.update_user), "__call__") as call: + with mock.patch.object(type(client.transport.create_user), "__call__") as call: call.return_value = resources.User() - client.update_user(request) + client.create_user(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -12198,26 +12840,26 @@ def test_update_user_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "user.name=name_value", + "parent=parent_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_update_user_field_headers_async(): +async def test_create_user_field_headers_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.UpdateUserRequest() + request = service.CreateUserRequest() - request.user.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_user), "__call__") as call: + with mock.patch.object(type(client.transport.create_user), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.User()) - await client.update_user(request) + await client.create_user(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -12228,39 +12870,43 @@ async def test_update_user_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "user.name=name_value", + "parent=parent_value", ) in kw["metadata"] -def test_update_user_flattened(): +def test_create_user_flattened(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.update_user), "__call__") as call: + with mock.patch.object(type(client.transport.create_user), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = resources.User() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.update_user( + client.create_user( + parent="parent_value", user=resources.User(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + user_id="user_id_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val arg = args[0].user mock_val = resources.User(name="name_value") assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + arg = args[0].user_id + mock_val = "user_id_value" assert arg == mock_val -def test_update_user_flattened_error(): +def test_create_user_flattened_error(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -12268,46 +12914,51 @@ def test_update_user_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_user( - service.UpdateUserRequest(), + client.create_user( + service.CreateUserRequest(), + parent="parent_value", user=resources.User(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + user_id="user_id_value", ) @pytest.mark.asyncio -async def test_update_user_flattened_async(): +async def test_create_user_flattened_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.update_user), "__call__") as call: + with mock.patch.object(type(client.transport.create_user), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = resources.User() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.User()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.update_user( + response = await client.create_user( + parent="parent_value", user=resources.User(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + user_id="user_id_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val arg = args[0].user mock_val = resources.User(name="name_value") assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + arg = args[0].user_id + mock_val = "user_id_value" assert arg == mock_val @pytest.mark.asyncio -async def test_update_user_flattened_error_async(): +async def test_create_user_flattened_error_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) @@ -12315,21 +12966,22 @@ async def test_update_user_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.update_user( - service.UpdateUserRequest(), + await client.create_user( + service.CreateUserRequest(), + parent="parent_value", user=resources.User(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + user_id="user_id_value", ) @pytest.mark.parametrize( "request_type", [ - service.DeleteUserRequest, + service.UpdateUserRequest, dict, ], ) -def test_delete_user(request_type, transport: str = "grpc"): +def test_update_user(request_type, transport: str = "grpc"): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -12340,22 +12992,33 @@ def test_delete_user(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_user), "__call__") as call: + with mock.patch.object(type(client.transport.update_user), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_user(request) + call.return_value = resources.User( + name="name_value", + password="password_value", + database_roles=["database_roles_value"], + user_type=resources.User.UserType.ALLOYDB_BUILT_IN, + keep_extra_roles=True, + ) + response = client.update_user(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = service.DeleteUserRequest() + request = service.UpdateUserRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert response is None + assert isinstance(response, resources.User) + assert response.name == "name_value" + assert response.password == "password_value" + assert response.database_roles == ["database_roles_value"] + assert response.user_type == resources.User.UserType.ALLOYDB_BUILT_IN + assert response.keep_extra_roles is True -def test_delete_user_non_empty_request_with_auto_populated_field(): +def test_update_user_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AlloyDBAdminClient( @@ -12366,26 +13029,24 @@ def test_delete_user_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = service.DeleteUserRequest( - name="name_value", + request = service.UpdateUserRequest( request_id="request_id_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_user), "__call__") as call: + with mock.patch.object(type(client.transport.update_user), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.delete_user(request=request) + client.update_user(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.DeleteUserRequest( - name="name_value", + assert args[0] == service.UpdateUserRequest( request_id="request_id_value", ) -def test_delete_user_use_cached_wrapped_rpc(): +def test_update_user_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -12399,21 +13060,21 @@ def test_delete_user_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.delete_user in client._transport._wrapped_methods + assert client._transport.update_user in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.delete_user] = mock_rpc + client._transport._wrapped_methods[client._transport.update_user] = mock_rpc request = {} - client.delete_user(request) + client.update_user(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.delete_user(request) + client.update_user(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -12421,7 +13082,7 @@ def test_delete_user_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_delete_user_async_use_cached_wrapped_rpc( +async def test_update_user_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -12438,7 +13099,7 @@ async def test_delete_user_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.delete_user + client._client._transport.update_user in client._client._transport._wrapped_methods ) @@ -12446,16 +13107,16 @@ async def test_delete_user_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.delete_user + client._client._transport.update_user ] = mock_rpc request = {} - await client.delete_user(request) + await client.update_user(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - await client.delete_user(request) + await client.update_user(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -12463,8 +13124,8 @@ async def test_delete_user_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_delete_user_async( - transport: str = "grpc_asyncio", request_type=service.DeleteUserRequest +async def test_update_user_async( + transport: str = "grpc_asyncio", request_type=service.UpdateUserRequest ): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), @@ -12476,41 +13137,54 @@ async def test_delete_user_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.delete_user), "__call__") as call: + with mock.patch.object(type(client.transport.update_user), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_user(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.User( + name="name_value", + password="password_value", + database_roles=["database_roles_value"], + user_type=resources.User.UserType.ALLOYDB_BUILT_IN, + keep_extra_roles=True, + ) + ) + response = await client.update_user(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = service.DeleteUserRequest() + request = service.UpdateUserRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert response is None - - + assert isinstance(response, resources.User) + assert response.name == "name_value" + assert response.password == "password_value" + assert response.database_roles == ["database_roles_value"] + assert response.user_type == resources.User.UserType.ALLOYDB_BUILT_IN + assert response.keep_extra_roles is True + + @pytest.mark.asyncio -async def test_delete_user_async_from_dict(): - await test_delete_user_async(request_type=dict) +async def test_update_user_async_from_dict(): + await test_update_user_async(request_type=dict) -def test_delete_user_field_headers(): +def test_update_user_field_headers(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.DeleteUserRequest() + request = service.UpdateUserRequest() - request.name = "name_value" + request.user.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.delete_user), "__call__") as call: - call.return_value = None - client.delete_user(request) + with mock.patch.object(type(client.transport.update_user), "__call__") as call: + call.return_value = resources.User() + client.update_user(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -12521,26 +13195,26 @@ def test_delete_user_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "user.name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_delete_user_field_headers_async(): +async def test_update_user_field_headers_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.DeleteUserRequest() + request = service.UpdateUserRequest() - request.name = "name_value" + request.user.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_user), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_user(request) + with mock.patch.object(type(client.transport.update_user), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.User()) + await client.update_user(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) @@ -12551,35 +13225,39 @@ async def test_delete_user_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "user.name=name_value", ) in kw["metadata"] -def test_delete_user_flattened(): +def test_update_user_flattened(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_user), "__call__") as call: + with mock.patch.object(type(client.transport.update_user), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = None + call.return_value = resources.User() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.delete_user( - name="name_value", + client.update_user( + user=resources.User(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].user + mock_val = resources.User(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val -def test_delete_user_flattened_error(): +def test_update_user_flattened_error(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -12587,41 +13265,46 @@ def test_delete_user_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.delete_user( - service.DeleteUserRequest(), - name="name_value", + client.update_user( + service.UpdateUserRequest(), + user=resources.User(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio -async def test_delete_user_flattened_async(): +async def test_update_user_flattened_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_user), "__call__") as call: + with mock.patch.object(type(client.transport.update_user), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = None + call.return_value = resources.User() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.User()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.delete_user( - name="name_value", + response = await client.update_user( + user=resources.User(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].user + mock_val = resources.User(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val @pytest.mark.asyncio -async def test_delete_user_flattened_error_async(): +async def test_update_user_flattened_error_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) @@ -12629,20 +13312,21 @@ async def test_delete_user_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.delete_user( - service.DeleteUserRequest(), - name="name_value", + await client.update_user( + service.UpdateUserRequest(), + user=resources.User(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.parametrize( "request_type", [ - service.ListDatabasesRequest, + service.DeleteUserRequest, dict, ], ) -def test_list_databases(request_type, transport: str = "grpc"): +def test_delete_user(request_type, transport: str = "grpc"): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -12653,25 +13337,22 @@ def test_list_databases(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_databases), "__call__") as call: + with mock.patch.object(type(client.transport.delete_user), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = service.ListDatabasesResponse( - next_page_token="next_page_token_value", - ) - response = client.list_databases(request) + call.return_value = None + response = client.delete_user(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = service.ListDatabasesRequest() + request = service.DeleteUserRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListDatabasesPager) - assert response.next_page_token == "next_page_token_value" + assert response is None -def test_list_databases_non_empty_request_with_auto_populated_field(): +def test_delete_user_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AlloyDBAdminClient( @@ -12682,28 +13363,26 @@ def test_list_databases_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = service.ListDatabasesRequest( - parent="parent_value", - page_token="page_token_value", - filter="filter_value", + request = service.DeleteUserRequest( + name="name_value", + request_id="request_id_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_databases), "__call__") as call: + with mock.patch.object(type(client.transport.delete_user), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.list_databases(request=request) + client.delete_user(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.ListDatabasesRequest( - parent="parent_value", - page_token="page_token_value", - filter="filter_value", + assert args[0] == service.DeleteUserRequest( + name="name_value", + request_id="request_id_value", ) -def test_list_databases_use_cached_wrapped_rpc(): +def test_delete_user_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -12717,21 +13396,21 @@ def test_list_databases_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_databases in client._transport._wrapped_methods + assert client._transport.delete_user in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.list_databases] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_user] = mock_rpc request = {} - client.list_databases(request) + client.delete_user(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_databases(request) + client.delete_user(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -12739,7 +13418,7 @@ def test_list_databases_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_list_databases_async_use_cached_wrapped_rpc( +async def test_delete_user_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -12756,7 +13435,7 @@ async def test_list_databases_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.list_databases + client._client._transport.delete_user in client._client._transport._wrapped_methods ) @@ -12764,16 +13443,16 @@ async def test_list_databases_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.list_databases + client._client._transport.delete_user ] = mock_rpc request = {} - await client.list_databases(request) + await client.delete_user(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - await client.list_databases(request) + await client.delete_user(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -12781,8 +13460,8 @@ async def test_list_databases_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_list_databases_async( - transport: str = "grpc_asyncio", request_type=service.ListDatabasesRequest +async def test_delete_user_async( + transport: str = "grpc_asyncio", request_type=service.DeleteUserRequest ): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), @@ -12794,46 +13473,41 @@ async def test_list_databases_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_databases), "__call__") as call: + with mock.patch.object(type(client.transport.delete_user), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - service.ListDatabasesResponse( - next_page_token="next_page_token_value", - ) - ) - response = await client.list_databases(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_user(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = service.ListDatabasesRequest() + request = service.DeleteUserRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListDatabasesAsyncPager) - assert response.next_page_token == "next_page_token_value" + assert response is None @pytest.mark.asyncio -async def test_list_databases_async_from_dict(): - await test_list_databases_async(request_type=dict) +async def test_delete_user_async_from_dict(): + await test_delete_user_async(request_type=dict) -def test_list_databases_field_headers(): +def test_delete_user_field_headers(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.ListDatabasesRequest() + request = service.DeleteUserRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_databases), "__call__") as call: - call.return_value = service.ListDatabasesResponse() - client.list_databases(request) + with mock.patch.object(type(client.transport.delete_user), "__call__") as call: + call.return_value = None + client.delete_user(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -12844,28 +13518,26 @@ def test_list_databases_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_list_databases_field_headers_async(): +async def test_delete_user_field_headers_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.ListDatabasesRequest() + request = service.DeleteUserRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_databases), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - service.ListDatabasesResponse() - ) - await client.list_databases(request) + with mock.patch.object(type(client.transport.delete_user), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_user(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) @@ -12876,35 +13548,35 @@ async def test_list_databases_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] -def test_list_databases_flattened(): +def test_delete_user_flattened(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_databases), "__call__") as call: + with mock.patch.object(type(client.transport.delete_user), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = service.ListDatabasesResponse() + call.return_value = None # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_databases( - parent="parent_value", + client.delete_user( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val -def test_list_databases_flattened_error(): +def test_delete_user_flattened_error(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -12912,43 +13584,41 @@ def test_list_databases_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.list_databases( - service.ListDatabasesRequest(), - parent="parent_value", + client.delete_user( + service.DeleteUserRequest(), + name="name_value", ) @pytest.mark.asyncio -async def test_list_databases_flattened_async(): +async def test_delete_user_flattened_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_databases), "__call__") as call: + with mock.patch.object(type(client.transport.delete_user), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = service.ListDatabasesResponse() + call.return_value = None - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - service.ListDatabasesResponse() - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_databases( - parent="parent_value", + response = await client.delete_user( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio -async def test_list_databases_flattened_error_async(): +async def test_delete_user_flattened_error_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) @@ -12956,169 +13626,403 @@ async def test_list_databases_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.list_databases( - service.ListDatabasesRequest(), - parent="parent_value", + await client.delete_user( + service.DeleteUserRequest(), + name="name_value", ) -def test_list_databases_pager(transport_name: str = "grpc"): - client = AlloyDBAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, +@pytest.mark.parametrize( + "request_type", + [ + service.ListDatabasesRequest, + dict, + ], +) +def test_list_databases(request_type, transport: str = "grpc"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_databases), "__call__") as call: - # Set the response to a series of pages. - call.side_effect = ( - service.ListDatabasesResponse( - databases=[ - resources.Database(), - resources.Database(), - resources.Database(), - ], - next_page_token="abc", - ), - service.ListDatabasesResponse( - databases=[], - next_page_token="def", - ), - service.ListDatabasesResponse( - databases=[ - resources.Database(), - ], - next_page_token="ghi", - ), - service.ListDatabasesResponse( - databases=[ - resources.Database(), - resources.Database(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + # Designate an appropriate return value for the call. 
+ call.return_value = service.ListDatabasesResponse( + next_page_token="next_page_token_value", ) - pager = client.list_databases(request={}, retry=retry, timeout=timeout) + response = client.list_databases(request) - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = service.ListDatabasesRequest() + assert args[0] == request - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, resources.Database) for i in results) + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDatabasesPager) + assert response.next_page_token == "next_page_token_value" -def test_list_databases_pages(transport_name: str = "grpc"): +def test_list_databases_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service.ListDatabasesRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_databases), "__call__") as call: - # Set the response to a series of pages. 
- call.side_effect = ( - service.ListDatabasesResponse( - databases=[ - resources.Database(), - resources.Database(), - resources.Database(), - ], - next_page_token="abc", - ), - service.ListDatabasesResponse( - databases=[], - next_page_token="def", - ), - service.ListDatabasesResponse( - databases=[ - resources.Database(), - ], - next_page_token="ghi", - ), - service.ListDatabasesResponse( - databases=[ - resources.Database(), - resources.Database(), - ], - ), - RuntimeError, + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_databases(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListDatabasesRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", ) - pages = list(client.list_databases(request={}).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token -@pytest.mark.asyncio -async def test_list_databases_async_pager(): - client = AlloyDBAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) +def test_list_databases_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_databases), "__call__", new_callable=mock.AsyncMock - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - service.ListDatabasesResponse( - databases=[ - resources.Database(), - resources.Database(), - resources.Database(), - ], - next_page_token="abc", - ), - service.ListDatabasesResponse( - databases=[], - next_page_token="def", - ), - service.ListDatabasesResponse( - databases=[ - resources.Database(), - ], - next_page_token="ghi", - ), - service.ListDatabasesResponse( - databases=[ - resources.Database(), - resources.Database(), - ], - ), - RuntimeError, + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_databases in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. ) - async_pager = await client.list_databases( - request={}, + client._transport._wrapped_methods[client._transport.list_databases] = mock_rpc + request = {} + client.list_databases(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_databases(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_databases_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = AlloyDBAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, ) - assert async_pager.next_page_token == "abc" - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - assert len(responses) == 6 - assert all(isinstance(i, resources.Database) for i in responses) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_databases + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.list_databases + ] = mock_rpc + + request = {} + await client.list_databases(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.list_databases(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_list_databases_async_pages(): +async def test_list_databases_async( + transport: str = "grpc_asyncio", request_type=service.ListDatabasesRequest +): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), + transport=transport, ) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_databases), "__call__", new_callable=mock.AsyncMock - ) as call: - # Set the response to a series of pages. - call.side_effect = ( + with mock.patch.object(type(client.transport.list_databases), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListDatabasesResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_databases(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = service.ListDatabasesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDatabasesAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_databases_async_from_dict(): + await test_list_databases_async(request_type=dict) + + +def test_list_databases_field_headers(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. 
Set these to a non-empty value. + request = service.ListDatabasesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_databases), "__call__") as call: + call.return_value = service.ListDatabasesResponse() + client.list_databases(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_databases_field_headers_async(): + client = AlloyDBAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.ListDatabasesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_databases), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListDatabasesResponse() + ) + await client.list_databases(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_databases_flattened(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_databases), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = service.ListDatabasesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_databases( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_databases_flattened_error(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_databases( + service.ListDatabasesRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_databases_flattened_async(): + client = AlloyDBAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_databases), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = service.ListDatabasesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListDatabasesResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_databases( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_databases_flattened_error_async(): + client = AlloyDBAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_databases( + service.ListDatabasesRequest(), + parent="parent_value", + ) + + +def test_list_databases_pager(transport_name: str = "grpc"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_databases), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListDatabasesResponse( + databases=[ + resources.Database(), + resources.Database(), + resources.Database(), + ], + next_page_token="abc", + ), + service.ListDatabasesResponse( + databases=[], + next_page_token="def", + ), + service.ListDatabasesResponse( + databases=[ + resources.Database(), + ], + next_page_token="ghi", + ), + service.ListDatabasesResponse( + databases=[ + resources.Database(), + resources.Database(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_databases(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.Database) for i in results) + + +def test_list_databases_pages(transport_name: str = "grpc"): + client = AlloyDBAdminClient( + 
credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_databases), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( service.ListDatabasesResponse( databases=[ resources.Database(), @@ -13145,18 +14049,541 @@ async def test_list_databases_async_pages(): ), RuntimeError, ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_databases(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token + pages = list(client.list_databases(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_databases_async_pager(): + client = AlloyDBAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_databases), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + service.ListDatabasesResponse( + databases=[ + resources.Database(), + resources.Database(), + resources.Database(), + ], + next_page_token="abc", + ), + service.ListDatabasesResponse( + databases=[], + next_page_token="def", + ), + service.ListDatabasesResponse( + databases=[ + resources.Database(), + ], + next_page_token="ghi", + ), + service.ListDatabasesResponse( + databases=[ + resources.Database(), + resources.Database(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_databases( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, resources.Database) for i in responses) + + +@pytest.mark.asyncio +async def test_list_databases_async_pages(): + client = AlloyDBAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_databases), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + service.ListDatabasesResponse( + databases=[ + resources.Database(), + resources.Database(), + resources.Database(), + ], + next_page_token="abc", + ), + service.ListDatabasesResponse( + databases=[], + next_page_token="def", + ), + service.ListDatabasesResponse( + databases=[ + resources.Database(), + ], + next_page_token="ghi", + ), + service.ListDatabasesResponse( + databases=[ + resources.Database(), + resources.Database(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_databases(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_list_clusters_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_clusters in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_clusters] = mock_rpc + + request = {} + client.list_clusters(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_clusters(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_clusters_rest_required_fields(request_type=service.ListClustersRequest): + transport_class = transports.AlloyDBAdminRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_clusters._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_clusters._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = service.ListClustersResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = service.ListClustersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_clusters(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_clusters_rest_unset_required_fields(): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_clusters._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +def test_list_clusters_rest_flattened(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = service.ListClustersResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = service.ListClustersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_clusters(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{parent=projects/*/locations/*}/clusters" + % client.transport._host, + args[1], + ) + + +def test_list_clusters_rest_flattened_error(transport: str = "rest"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_clusters( + service.ListClustersRequest(), + parent="parent_value", + ) + + +def test_list_clusters_rest_pager(transport: str = "rest"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + service.ListClustersResponse( + clusters=[ + resources.Cluster(), + resources.Cluster(), + resources.Cluster(), + ], + next_page_token="abc", + ), + service.ListClustersResponse( + clusters=[], + next_page_token="def", + ), + service.ListClustersResponse( + clusters=[ + resources.Cluster(), + ], + next_page_token="ghi", + ), + service.ListClustersResponse( + clusters=[ + resources.Cluster(), + resources.Cluster(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(service.ListClustersResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_clusters(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.Cluster) for i in results) + + pages = list(client.list_clusters(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_get_cluster_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_cluster in client._transport._wrapped_methods 
+ + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_cluster] = mock_rpc + + request = {} + client.get_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_cluster(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_cluster_rest_required_fields(request_type=service.GetClusterRequest): + transport_class = transports.AlloyDBAdminRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_cluster._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_cluster._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("view",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = resources.Cluster() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = resources.Cluster.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_cluster(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_cluster_rest_unset_required_fields(): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_cluster._get_unset_required_fields({}) + assert set(unset_fields) == (set(("view",)) & set(("name",))) + + +def test_get_cluster_rest_flattened(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = resources.Cluster() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/clusters/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.Cluster.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_cluster(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{name=projects/*/locations/*/clusters/*}" + % client.transport._host, + args[1], + ) -def test_list_clusters_rest_use_cached_wrapped_rpc(): +def test_get_cluster_rest_flattened_error(transport: str = "rest"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_cluster( + service.GetClusterRequest(), + name="name_value", + ) + + +def test_create_cluster_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -13170,33 +14597,38 @@ def test_list_clusters_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_clusters in client._transport._wrapped_methods + assert client._transport.create_cluster in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.list_clusters] = mock_rpc + client._transport._wrapped_methods[client._transport.create_cluster] = mock_rpc request = {} - client.list_clusters(request) + client.create_cluster(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_clusters(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_clusters_rest_required_fields(request_type=service.ListClustersRequest): +def test_create_cluster_rest_required_fields(request_type=service.CreateClusterRequest): transport_class = transports.AlloyDBAdminRestTransport request_init = {} request_init["parent"] = "" + request_init["cluster_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -13204,26 +14636,29 @@ def test_list_clusters_rest_required_fields(request_type=service.ListClustersReq ) # verify fields with default values are dropped + assert "clusterId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_clusters._get_unset_required_fields(jsonified_request) + ).create_cluster._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + assert "clusterId" in jsonified_request + assert jsonified_request["clusterId"] == request_init["cluster_id"] jsonified_request["parent"] = "parent_value" + jsonified_request["clusterId"] = "cluster_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_clusters._get_unset_required_fields(jsonified_request) + ).create_cluster._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
assert not set(unset_fields) - set( ( - "filter", - "order_by", - "page_size", - "page_token", + "cluster_id", + "request_id", + "validate_only", ) ) jsonified_request.update(unset_fields) @@ -13231,6 +14666,8 @@ def test_list_clusters_rest_required_fields(request_type=service.ListClustersReq # verify required fields with non-default values are left alone assert "parent" in jsonified_request assert jsonified_request["parent"] == "parent_value" + assert "clusterId" in jsonified_request + assert jsonified_request["clusterId"] == "cluster_id_value" client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -13239,7 +14676,7 @@ def test_list_clusters_rest_required_fields(request_type=service.ListClustersReq request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = service.ListClustersResponse() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -13251,48 +14688,57 @@ def test_list_clusters_rest_required_fields(request_type=service.ListClustersReq pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = service.ListClustersResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_clusters(request) + response = client.create_cluster(request) - expected_params = [("$alt", "json;enum-encoding=int")] + expected_params = [ + ( + "clusterId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_clusters_rest_unset_required_fields(): +def test_create_cluster_rest_unset_required_fields(): transport = transports.AlloyDBAdminRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_clusters._get_unset_required_fields({}) + unset_fields = transport.create_cluster._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( - "filter", - "orderBy", - "pageSize", - "pageToken", + "clusterId", + "requestId", + "validateOnly", + ) + ) + & set( + ( + "parent", + "clusterId", + "cluster", ) ) - & set(("parent",)) ) -def test_list_clusters_rest_flattened(): +def test_create_cluster_rest_flattened(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -13301,7 +14747,7 @@ def test_list_clusters_rest_flattened(): # Mock the http request call within the method and fake a 
response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = service.ListClustersResponse() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method sample_request = {"parent": "projects/sample1/locations/sample2"} @@ -13309,19 +14755,21 @@ def test_list_clusters_rest_flattened(): # get truthy value for each flattened field mock_args = dict( parent="parent_value", + cluster=resources.Cluster( + backup_source=resources.BackupSource(backup_uid="backup_uid_value") + ), + cluster_id="cluster_id_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = service.ListClustersResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_clusters(**mock_args) + client.create_cluster(**mock_args) # Establish that the underlying call was made with the expected # request object values. @@ -13334,7 +14782,7 @@ def test_list_clusters_rest_flattened(): ) -def test_list_clusters_rest_flattened_error(transport: str = "rest"): +def test_create_cluster_rest_flattened_error(transport: str = "rest"): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -13343,74 +14791,17 @@ def test_list_clusters_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.list_clusters( - service.ListClustersRequest(), + client.create_cluster( + service.CreateClusterRequest(), parent="parent_value", - ) - - -def test_list_clusters_rest_pager(transport: str = "rest"): - client = AlloyDBAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - service.ListClustersResponse( - clusters=[ - resources.Cluster(), - resources.Cluster(), - resources.Cluster(), - ], - next_page_token="abc", - ), - service.ListClustersResponse( - clusters=[], - next_page_token="def", - ), - service.ListClustersResponse( - clusters=[ - resources.Cluster(), - ], - next_page_token="ghi", - ), - service.ListClustersResponse( - clusters=[ - resources.Cluster(), - resources.Cluster(), - ], + cluster=resources.Cluster( + backup_source=resources.BackupSource(backup_uid="backup_uid_value") ), + cluster_id="cluster_id_value", ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(service.ListClustersResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "projects/sample1/locations/sample2"} - - pager = client.list_clusters(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, resources.Cluster) for i in results) - - pages = list(client.list_clusters(request=sample_request).pages) - for page_, token in 
zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token -def test_get_cluster_rest_use_cached_wrapped_rpc(): +def test_update_cluster_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -13424,33 +14815,36 @@ def test_get_cluster_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_cluster in client._transport._wrapped_methods + assert client._transport.update_cluster in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_cluster] = mock_rpc + client._transport._wrapped_methods[client._transport.update_cluster] = mock_rpc request = {} - client.get_cluster(request) + client.update_cluster(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_cluster(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_cluster_rest_required_fields(request_type=service.GetClusterRequest): +def test_update_cluster_rest_required_fields(request_type=service.UpdateClusterRequest): transport_class = transports.AlloyDBAdminRestTransport request_init = {} - request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -13461,23 +14855,26 @@ def test_get_cluster_rest_required_fields(request_type=service.GetClusterRequest unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_cluster._get_unset_required_fields(jsonified_request) + ).update_cluster._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" - unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_cluster._get_unset_required_fields(jsonified_request) + ).update_cluster._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("view",)) + assert not set(unset_fields) - set( + ( + "allow_missing", + "request_id", + "update_mask", + "validate_only", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -13486,7 +14883,7 @@ def test_get_cluster_rest_required_fields(request_type=service.GetClusterRequest request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = resources.Cluster() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -13498,38 +14895,46 @@ def test_get_cluster_rest_required_fields(request_type=service.GetClusterRequest pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "patch", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = resources.Cluster.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_cluster(request) + response = client.update_cluster(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_cluster_rest_unset_required_fields(): +def test_update_cluster_rest_unset_required_fields(): transport = transports.AlloyDBAdminRestTransport( 
credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_cluster._get_unset_required_fields({}) - assert set(unset_fields) == (set(("view",)) & set(("name",))) + unset_fields = transport.update_cluster._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "allowMissing", + "requestId", + "updateMask", + "validateOnly", + ) + ) + & set(("cluster",)) + ) -def test_get_cluster_rest_flattened(): +def test_update_cluster_rest_flattened(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -13538,40 +14943,43 @@ def test_get_cluster_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.Cluster() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {"name": "projects/sample1/locations/sample2/clusters/sample3"} + sample_request = { + "cluster": {"name": "projects/sample1/locations/sample2/clusters/sample3"} + } # get truthy value for each flattened field mock_args = dict( - name="name_value", + cluster=resources.Cluster( + backup_source=resources.BackupSource(backup_uid="backup_uid_value") + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = resources.Cluster.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_cluster(**mock_args) + client.update_cluster(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{name=projects/*/locations/*/clusters/*}" + "%s/v1alpha/{cluster.name=projects/*/locations/*/clusters/*}" % client.transport._host, args[1], ) -def test_get_cluster_rest_flattened_error(transport: str = "rest"): +def test_update_cluster_rest_flattened_error(transport: str = "rest"): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -13580,13 +14988,16 @@ def test_get_cluster_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_cluster( - service.GetClusterRequest(), - name="name_value", + client.update_cluster( + service.UpdateClusterRequest(), + cluster=resources.Cluster( + backup_source=resources.BackupSource(backup_uid="backup_uid_value") + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_create_cluster_rest_use_cached_wrapped_rpc(): +def test_upgrade_cluster_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -13600,17 +15011,17 @@ def test_create_cluster_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.create_cluster in client._transport._wrapped_methods + assert client._transport.upgrade_cluster in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.create_cluster] = mock_rpc + client._transport._wrapped_methods[client._transport.upgrade_cluster] = mock_rpc request = {} - client.create_cluster(request) + client.upgrade_cluster(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 @@ -13619,19 +15030,20 @@ def test_create_cluster_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.create_cluster(request) + client.upgrade_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_cluster_rest_required_fields(request_type=service.CreateClusterRequest): +def test_upgrade_cluster_rest_required_fields( + request_type=service.UpgradeClusterRequest, +): transport_class = transports.AlloyDBAdminRestTransport request_init = {} - request_init["parent"] = "" - request_init["cluster_id"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -13639,38 +15051,24 @@ def test_create_cluster_rest_required_fields(request_type=service.CreateClusterR ) # verify fields with default values are dropped - assert "clusterId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_cluster._get_unset_required_fields(jsonified_request) + ).upgrade_cluster._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - assert "clusterId" in jsonified_request - assert jsonified_request["clusterId"] == request_init["cluster_id"] - jsonified_request["parent"] = "parent_value" - jsonified_request["clusterId"] = "cluster_id_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( 
credentials=ga_credentials.AnonymousCredentials() - ).create_cluster._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "cluster_id", - "request_id", - "validate_only", - ) - ) + ).upgrade_cluster._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - assert "clusterId" in jsonified_request - assert jsonified_request["clusterId"] == "cluster_id_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -13691,7 +15089,7 @@ def test_create_cluster_rest_required_fields(request_type=service.CreateClusterR pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "patch", "query_params": pb_request, } transcode_result["body"] = pb_request @@ -13704,44 +15102,31 @@ def test_create_cluster_rest_required_fields(request_type=service.CreateClusterR response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_cluster(request) + response = client.upgrade_cluster(request) - expected_params = [ - ( - "clusterId", - "", - ), - ("$alt", "json;enum-encoding=int"), - ] + expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_cluster_rest_unset_required_fields(): +def test_upgrade_cluster_rest_unset_required_fields(): transport = transports.AlloyDBAdminRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_cluster._get_unset_required_fields({}) + unset_fields = 
transport.upgrade_cluster._get_unset_required_fields({}) assert set(unset_fields) == ( - set( - ( - "clusterId", - "requestId", - "validateOnly", - ) - ) + set(()) & set( ( - "parent", - "clusterId", - "cluster", + "name", + "version", ) ) ) -def test_create_cluster_rest_flattened(): +def test_upgrade_cluster_rest_flattened(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -13753,15 +15138,12 @@ def test_create_cluster_rest_flattened(): return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = {"name": "projects/sample1/locations/sample2/clusters/sample3"} # get truthy value for each flattened field mock_args = dict( - parent="parent_value", - cluster=resources.Cluster( - backup_source=resources.BackupSource(backup_uid="backup_uid_value") - ), - cluster_id="cluster_id_value", + name="name_value", + version=resources.DatabaseVersion.POSTGRES_13, ) mock_args.update(sample_request) @@ -13772,20 +15154,20 @@ def test_create_cluster_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.create_cluster(**mock_args) + client.upgrade_cluster(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{parent=projects/*/locations/*}/clusters" + "%s/v1alpha/{name=projects/*/locations/*/clusters/*}:upgrade" % client.transport._host, args[1], ) -def test_create_cluster_rest_flattened_error(transport: str = "rest"): +def test_upgrade_cluster_rest_flattened_error(transport: str = "rest"): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -13794,17 +15176,14 @@ def test_create_cluster_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_cluster( - service.CreateClusterRequest(), - parent="parent_value", - cluster=resources.Cluster( - backup_source=resources.BackupSource(backup_uid="backup_uid_value") - ), - cluster_id="cluster_id_value", + client.upgrade_cluster( + service.UpgradeClusterRequest(), + name="name_value", + version=resources.DatabaseVersion.POSTGRES_13, ) -def test_update_cluster_rest_use_cached_wrapped_rpc(): +def test_delete_cluster_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -13818,17 +15197,17 @@ def test_update_cluster_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.update_cluster in client._transport._wrapped_methods + assert client._transport.delete_cluster in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.update_cluster] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_cluster] = mock_rpc request = {} - client.update_cluster(request) + client.delete_cluster(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 @@ -13837,17 +15216,18 @@ def test_update_cluster_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.update_cluster(request) + client.delete_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_update_cluster_rest_required_fields(request_type=service.UpdateClusterRequest): +def test_delete_cluster_rest_required_fields(request_type=service.DeleteClusterRequest): transport_class = transports.AlloyDBAdminRestTransport request_init = {} + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -13858,26 +15238,30 @@ def test_update_cluster_rest_required_fields(request_type=service.UpdateClusterR unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_cluster._get_unset_required_fields(jsonified_request) + ).delete_cluster._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + jsonified_request["name"] = "name_value" + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_cluster._get_unset_required_fields(jsonified_request) + ).delete_cluster._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
assert not set(unset_fields) - set( ( - "allow_missing", + "etag", + "force", "request_id", - "update_mask", "validate_only", ) ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -13898,10 +15282,9 @@ def test_update_cluster_rest_required_fields(request_type=service.UpdateClusterR pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "delete", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -13911,33 +15294,33 @@ def test_update_cluster_rest_required_fields(request_type=service.UpdateClusterR response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_cluster(request) + response = client.delete_cluster(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_cluster_rest_unset_required_fields(): +def test_delete_cluster_rest_unset_required_fields(): transport = transports.AlloyDBAdminRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.update_cluster._get_unset_required_fields({}) + unset_fields = transport.delete_cluster._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( - "allowMissing", + "etag", + "force", "requestId", - "updateMask", "validateOnly", ) ) - & set(("cluster",)) + & set(("name",)) ) -def test_update_cluster_rest_flattened(): +def test_delete_cluster_rest_flattened(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -13949,16 +15332,11 @@ def test_update_cluster_rest_flattened(): return_value = 
operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = { - "cluster": {"name": "projects/sample1/locations/sample2/clusters/sample3"} - } + sample_request = {"name": "projects/sample1/locations/sample2/clusters/sample3"} # get truthy value for each flattened field mock_args = dict( - cluster=resources.Cluster( - backup_source=resources.BackupSource(backup_uid="backup_uid_value") - ), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + name="name_value", ) mock_args.update(sample_request) @@ -13969,20 +15347,20 @@ def test_update_cluster_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.update_cluster(**mock_args) + client.delete_cluster(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{cluster.name=projects/*/locations/*/clusters/*}" + "%s/v1alpha/{name=projects/*/locations/*/clusters/*}" % client.transport._host, args[1], ) -def test_update_cluster_rest_flattened_error(transport: str = "rest"): +def test_delete_cluster_rest_flattened_error(transport: str = "rest"): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -13991,16 +15369,13 @@ def test_update_cluster_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.update_cluster( - service.UpdateClusterRequest(), - cluster=resources.Cluster( - backup_source=resources.BackupSource(backup_uid="backup_uid_value") - ), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.delete_cluster( + service.DeleteClusterRequest(), + name="name_value", ) -def test_delete_cluster_rest_use_cached_wrapped_rpc(): +def test_promote_cluster_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -14014,17 +15389,17 @@ def test_delete_cluster_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.delete_cluster in client._transport._wrapped_methods + assert client._transport.promote_cluster in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.delete_cluster] = mock_rpc + client._transport._wrapped_methods[client._transport.promote_cluster] = mock_rpc request = {} - client.delete_cluster(request) + client.promote_cluster(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 @@ -14033,14 +15408,16 @@ def test_delete_cluster_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.delete_cluster(request) + client.promote_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_cluster_rest_required_fields(request_type=service.DeleteClusterRequest): +def test_promote_cluster_rest_required_fields( + request_type=service.PromoteClusterRequest, +): transport_class = transports.AlloyDBAdminRestTransport request_init = {} @@ -14055,7 +15432,7 @@ def test_delete_cluster_rest_required_fields(request_type=service.DeleteClusterR unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_cluster._get_unset_required_fields(jsonified_request) + ).promote_cluster._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -14064,16 +15441,7 @@ def test_delete_cluster_rest_required_fields(request_type=service.DeleteClusterR unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_cluster._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "etag", - "force", - "request_id", - "validate_only", - ) - ) + ).promote_cluster._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -14099,9 +15467,10 @@ def test_delete_cluster_rest_required_fields(request_type=service.DeleteClusterR pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -14111,33 +15480,23 @@ def test_delete_cluster_rest_required_fields(request_type=service.DeleteClusterR response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_cluster(request) + response = client.promote_cluster(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_cluster_rest_unset_required_fields(): +def test_promote_cluster_rest_unset_required_fields(): transport = transports.AlloyDBAdminRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_cluster._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "etag", - "force", - "requestId", - "validateOnly", - ) - ) - & set(("name",)) - ) + unset_fields = transport.promote_cluster._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) -def test_delete_cluster_rest_flattened(): +def test_promote_cluster_rest_flattened(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -14164,20 +15523,20 @@ def test_delete_cluster_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - 
client.delete_cluster(**mock_args) + client.promote_cluster(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{name=projects/*/locations/*/clusters/*}" + "%s/v1alpha/{name=projects/*/locations/*/clusters/*}:promote" % client.transport._host, args[1], ) -def test_delete_cluster_rest_flattened_error(transport: str = "rest"): +def test_promote_cluster_rest_flattened_error(transport: str = "rest"): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -14186,13 +15545,13 @@ def test_delete_cluster_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete_cluster( - service.DeleteClusterRequest(), + client.promote_cluster( + service.PromoteClusterRequest(), name="name_value", ) -def test_promote_cluster_rest_use_cached_wrapped_rpc(): +def test_switchover_cluster_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -14206,17 +15565,21 @@ def test_promote_cluster_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.promote_cluster in client._transport._wrapped_methods + assert ( + client._transport.switchover_cluster in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.promote_cluster] = mock_rpc + client._transport._wrapped_methods[ + client._transport.switchover_cluster + ] = mock_rpc request = {} - client.promote_cluster(request) + client.switchover_cluster(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 @@ -14225,15 +15588,15 @@ def test_promote_cluster_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.promote_cluster(request) + client.switchover_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_promote_cluster_rest_required_fields( - request_type=service.PromoteClusterRequest, +def test_switchover_cluster_rest_required_fields( + request_type=service.SwitchoverClusterRequest, ): transport_class = transports.AlloyDBAdminRestTransport @@ -14249,7 +15612,7 @@ def test_promote_cluster_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).promote_cluster._get_unset_required_fields(jsonified_request) + ).switchover_cluster._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -14258,7 +15621,7 @@ def test_promote_cluster_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).promote_cluster._get_unset_required_fields(jsonified_request) + ).switchover_cluster._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -14297,23 +15660,23 @@ def test_promote_cluster_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.promote_cluster(request) + response = client.switchover_cluster(request) 
expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_promote_cluster_rest_unset_required_fields(): +def test_switchover_cluster_rest_unset_required_fields(): transport = transports.AlloyDBAdminRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.promote_cluster._get_unset_required_fields({}) + unset_fields = transport.switchover_cluster._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name",))) -def test_promote_cluster_rest_flattened(): +def test_switchover_cluster_rest_flattened(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -14340,20 +15703,20 @@ def test_promote_cluster_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.promote_cluster(**mock_args) + client.switchover_cluster(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{name=projects/*/locations/*/clusters/*}:promote" + "%s/v1alpha/{name=projects/*/locations/*/clusters/*}:switchover" % client.transport._host, args[1], ) -def test_promote_cluster_rest_flattened_error(transport: str = "rest"): +def test_switchover_cluster_rest_flattened_error(transport: str = "rest"): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -14362,8 +15725,8 @@ def test_promote_cluster_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.promote_cluster( - service.PromoteClusterRequest(), + client.switchover_cluster( + service.SwitchoverClusterRequest(), name="name_value", ) @@ -16676,6 +18039,212 @@ def test_restart_instance_rest_flattened_error(transport: str = "rest"): ) +def test_execute_sql_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.execute_sql in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.execute_sql] = mock_rpc + + request = {} + client.execute_sql(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.execute_sql(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_execute_sql_rest_required_fields(request_type=service.ExecuteSqlRequest): + transport_class = transports.AlloyDBAdminRestTransport + + request_init = {} + request_init["instance"] = "" + request_init["database"] = "" + request_init["user"] = "" + request_init["sql_statement"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).execute_sql._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["instance"] = "instance_value" + jsonified_request["database"] = "database_value" + jsonified_request["user"] = "user_value" + jsonified_request["sqlStatement"] = "sql_statement_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).execute_sql._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instance" in jsonified_request + assert jsonified_request["instance"] == "instance_value" + assert "database" in jsonified_request + assert jsonified_request["database"] == "database_value" + assert "user" in jsonified_request + assert jsonified_request["user"] == "user_value" + assert "sqlStatement" in jsonified_request + assert jsonified_request["sqlStatement"] == "sql_statement_value" + + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = 
request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = service.ExecuteSqlResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = service.ExecuteSqlResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.execute_sql(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_execute_sql_rest_unset_required_fields(): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.execute_sql._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "instance", + "database", + "user", + "sqlStatement", + ) + ) + ) + + +def test_execute_sql_rest_flattened(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = service.ExecuteSqlResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "instance": "projects/sample1/locations/sample2/clusters/sample3/instances/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + instance="instance_value", + database="database_value", + user="user_value", + sql_statement="sql_statement_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = service.ExecuteSqlResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.execute_sql(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{instance=projects/*/locations/*/clusters/*/instances/*}:executeSql" + % client.transport._host, + args[1], + ) + + +def test_execute_sql_rest_flattened_error(transport: str = "rest"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.execute_sql( + service.ExecuteSqlRequest(), + instance="instance_value", + database="database_value", + user="user_value", + sql_statement="sql_statement_value", + password="password_value", + ) + + def test_list_backups_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -19807,6 +21376,27 @@ def test_update_cluster_empty_call_grpc(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_upgrade_cluster_empty_call_grpc(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.upgrade_cluster), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.upgrade_cluster(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.UpgradeClusterRequest() + + assert args[0] == request_msg + + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. def test_delete_cluster_empty_call_grpc(): @@ -19849,6 +21439,29 @@ def test_promote_cluster_empty_call_grpc(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_switchover_cluster_empty_call_grpc(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.switchover_cluster), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.switchover_cluster(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.SwitchoverClusterRequest() + + assert args[0] == request_msg + + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. def test_restore_cluster_empty_call_grpc(): @@ -20109,6 +21722,27 @@ def test_restart_instance_empty_call_grpc(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_execute_sql_empty_call_grpc(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.execute_sql), "__call__") as call: + call.return_value = service.ExecuteSqlResponse() + client.execute_sql(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.ExecuteSqlRequest() + + assert args[0] == request_msg + + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. def test_list_backups_empty_call_grpc(): @@ -20476,6 +22110,7 @@ async def test_get_cluster_empty_call_grpc_asyncio(): reconciling=True, satisfies_pzi=True, satisfies_pzs=True, + subscription_type=resources.SubscriptionType.STANDARD, ) ) await client.get_cluster(request=None) @@ -20523,17 +22158,67 @@ async def test_update_cluster_empty_call_grpc_asyncio(): ) # Mock the actual call, and fake the request. 
- with mock.patch.object(type(client.transport.update_cluster), "__call__") as call: + with mock.patch.object(type(client.transport.update_cluster), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.update_cluster(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.UpdateClusterRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_upgrade_cluster_empty_call_grpc_asyncio(): + client = AlloyDBAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.upgrade_cluster), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.upgrade_cluster(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.UpgradeClusterRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_cluster_empty_call_grpc_asyncio(): + client = AlloyDBAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete_cluster), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - await client.update_cluster(request=None) + await client.delete_cluster(request=None) # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - request_msg = service.UpdateClusterRequest() + request_msg = service.DeleteClusterRequest() assert args[0] == request_msg @@ -20541,24 +22226,24 @@ async def test_update_cluster_empty_call_grpc_asyncio(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @pytest.mark.asyncio -async def test_delete_cluster_empty_call_grpc_asyncio(): +async def test_promote_cluster_empty_call_grpc_asyncio(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), transport="grpc_asyncio", ) # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.delete_cluster), "__call__") as call: + with mock.patch.object(type(client.transport.promote_cluster), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - await client.delete_cluster(request=None) + await client.promote_cluster(request=None) # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - request_msg = service.DeleteClusterRequest() + request_msg = service.PromoteClusterRequest() assert args[0] == request_msg @@ -20566,24 +22251,26 @@ async def test_delete_cluster_empty_call_grpc_asyncio(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
@pytest.mark.asyncio -async def test_promote_cluster_empty_call_grpc_asyncio(): +async def test_switchover_cluster_empty_call_grpc_asyncio(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), transport="grpc_asyncio", ) # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.promote_cluster), "__call__") as call: + with mock.patch.object( + type(client.transport.switchover_cluster), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - await client.promote_cluster(request=None) + await client.switchover_cluster(request=None) # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - request_msg = service.PromoteClusterRequest() + request_msg = service.SwitchoverClusterRequest() assert args[0] == request_msg @@ -20695,6 +22382,7 @@ async def test_get_instance_empty_call_grpc_asyncio(): etag="etag_value", satisfies_pzi=True, satisfies_pzs=True, + outbound_public_ip_addresses=["outbound_public_ip_addresses_value"], ) ) await client.get_instance(request=None) @@ -20913,6 +22601,31 @@ async def test_restart_instance_empty_call_grpc_asyncio(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_execute_sql_empty_call_grpc_asyncio(): + client = AlloyDBAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.execute_sql), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ExecuteSqlResponse() + ) + await client.execute_sql(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.ExecuteSqlRequest() + + assert args[0] == request_msg + + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @pytest.mark.asyncio @@ -21196,6 +22909,7 @@ async def test_get_user_empty_call_grpc_asyncio(): password="password_value", database_roles=["database_roles_value"], user_type=resources.User.UserType.ALLOYDB_BUILT_IN, + keep_extra_roles=True, ) ) await client.get_user(request=None) @@ -21226,6 +22940,7 @@ async def test_create_user_empty_call_grpc_asyncio(): password="password_value", database_roles=["database_roles_value"], user_type=resources.User.UserType.ALLOYDB_BUILT_IN, + keep_extra_roles=True, ) ) await client.create_user(request=None) @@ -21256,6 +22971,7 @@ async def test_update_user_empty_call_grpc_asyncio(): password="password_value", database_roles=["database_roles_value"], user_type=resources.User.UserType.ALLOYDB_BUILT_IN, + keep_extra_roles=True, ) ) await client.update_user(request=None) @@ -21497,6 +23213,7 @@ def test_get_cluster_rest_call_success(request_type): reconciling=True, satisfies_pzi=True, satisfies_pzs=True, + subscription_type=resources.SubscriptionType.STANDARD, ) # Wrap the value into a proper Response obj @@ -21523,6 +23240,7 @@ def test_get_cluster_rest_call_success(request_type): assert response.reconciling is True assert response.satisfies_pzi is True assert response.satisfies_pzs is True + assert response.subscription_type == resources.SubscriptionType.STANDARD @pytest.mark.parametrize("null_interceptor", [True, False]) @@ -21625,6 +23343,11 @@ def test_create_cluster_rest_call_success(request_type): "reference_id": "reference_id_value", "source_type": 1, }, + 
"cloudsql_backup_run_source": { + "project": "project_value", + "instance_id": "instance_id_value", + "backup_run_id": 1366, + }, "name": "name_value", "display_name": "display_name_value", "uid": "uid_value", @@ -21687,7 +23410,20 @@ def test_create_cluster_rest_call_success(request_type): }, "satisfies_pzi": True, "satisfies_pzs": True, - "psc_config": {"psc_enabled": True}, + "psc_config": {"psc_enabled": True, "service_owned_project_number": 2987}, + "maintenance_update_policy": { + "maintenance_windows": [{"day": 1, "start_time": {}}] + }, + "maintenance_schedule": {"start_time": {}}, + "gemini_config": {"entitled": True}, + "subscription_type": 1, + "trial_metadata": { + "start_time": {}, + "end_time": {}, + "upgrade_time": {}, + "grace_end_time": {}, + }, + "tags": {}, } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency @@ -21881,6 +23617,11 @@ def test_update_cluster_rest_call_success(request_type): "reference_id": "reference_id_value", "source_type": 1, }, + "cloudsql_backup_run_source": { + "project": "project_value", + "instance_id": "instance_id_value", + "backup_run_id": 1366, + }, "name": "projects/sample1/locations/sample2/clusters/sample3", "display_name": "display_name_value", "uid": "uid_value", @@ -21943,7 +23684,20 @@ def test_update_cluster_rest_call_success(request_type): }, "satisfies_pzi": True, "satisfies_pzs": True, - "psc_config": {"psc_enabled": True}, + "psc_config": {"psc_enabled": True, "service_owned_project_number": 2987}, + "maintenance_update_policy": { + "maintenance_windows": [{"day": 1, "start_time": {}}] + }, + "maintenance_schedule": {"start_time": {}}, + "gemini_config": {"entitled": True}, + "subscription_type": 1, + "trial_metadata": { + "start_time": {}, + "end_time": {}, + "upgrade_time": {}, + "grace_end_time": {}, + }, + "tags": {}, } # The version of a generated dependency at test 
runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency @@ -22000,18 +23754,240 @@ def get_message_fields(field): } ) - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["cluster"][field])): - del request_init["cluster"][field][i][subfield] - else: - del request_init["cluster"][field][subfield] + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["cluster"][field])): + del request_init["cluster"][field][i][subfield] + else: + del request_init["cluster"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_cluster(request) + + # Establish that the response is the type that we expect. + json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_cluster_rest_interceptors(null_interceptor): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AlloyDBAdminRestInterceptor(), + ) + client = AlloyDBAdminClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_update_cluster" + ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "pre_update_cluster" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.UpdateClusterRequest.pb(service.UpdateClusterRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = service.UpdateClusterRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.update_cluster( + request, + metadata=[ + ("key", 
"val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_upgrade_cluster_rest_bad_request(request_type=service.UpgradeClusterRequest): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/clusters/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.upgrade_cluster(request) + + +@pytest.mark.parametrize( + "request_type", + [ + service.UpgradeClusterRequest, + dict, + ], +) +def test_upgrade_cluster_rest_call_success(request_type): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/clusters/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.upgrade_cluster(request) + + # Establish that the response is the type that we expect. + json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_upgrade_cluster_rest_interceptors(null_interceptor): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AlloyDBAdminRestInterceptor(), + ) + client = AlloyDBAdminClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_upgrade_cluster" + ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "pre_upgrade_cluster" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.UpgradeClusterRequest.pb(service.UpgradeClusterRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = service.UpgradeClusterRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.upgrade_cluster( + request, + metadata=[ + 
("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_cluster_rest_bad_request(request_type=service.DeleteClusterRequest): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/clusters/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.delete_cluster(request) + + +@pytest.mark.parametrize( + "request_type", + [ + service.DeleteClusterRequest, + dict, + ], +) +def test_delete_cluster_rest_call_success(request_type): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/clusters/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -22025,14 +24001,14 @@ def get_message_fields(field): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_cluster(request) + response = client.delete_cluster(request) # Establish that the response is the type that we expect. 
json_return_value = json_format.MessageToJson(return_value) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_cluster_rest_interceptors(null_interceptor): +def test_delete_cluster_rest_interceptors(null_interceptor): transport = transports.AlloyDBAdminRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -22048,13 +24024,13 @@ def test_update_cluster_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.AlloyDBAdminRestInterceptor, "post_update_cluster" + transports.AlloyDBAdminRestInterceptor, "post_delete_cluster" ) as post, mock.patch.object( - transports.AlloyDBAdminRestInterceptor, "pre_update_cluster" + transports.AlloyDBAdminRestInterceptor, "pre_delete_cluster" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = service.UpdateClusterRequest.pb(service.UpdateClusterRequest()) + pb_message = service.DeleteClusterRequest.pb(service.DeleteClusterRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -22067,7 +24043,7 @@ def test_update_cluster_rest_interceptors(null_interceptor): return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.content = return_value - request = service.UpdateClusterRequest() + request = service.DeleteClusterRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -22075,7 +24051,7 @@ def test_update_cluster_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.update_cluster( + client.delete_cluster( request, metadata=[ ("key", "val"), @@ -22087,7 +24063,7 @@ def test_update_cluster_rest_interceptors(null_interceptor): post.assert_called_once() -def test_delete_cluster_rest_bad_request(request_type=service.DeleteClusterRequest): +def test_promote_cluster_rest_bad_request(request_type=service.PromoteClusterRequest): client 
= AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -22106,17 +24082,17 @@ def test_delete_cluster_rest_bad_request(request_type=service.DeleteClusterReque response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value - client.delete_cluster(request) + client.promote_cluster(request) @pytest.mark.parametrize( "request_type", [ - service.DeleteClusterRequest, + service.PromoteClusterRequest, dict, ], ) -def test_delete_cluster_rest_call_success(request_type): +def test_promote_cluster_rest_call_success(request_type): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -22136,14 +24112,14 @@ def test_delete_cluster_rest_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_cluster(request) + response = client.promote_cluster(request) # Establish that the response is the type that we expect. 
json_return_value = json_format.MessageToJson(return_value) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_cluster_rest_interceptors(null_interceptor): +def test_promote_cluster_rest_interceptors(null_interceptor): transport = transports.AlloyDBAdminRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -22159,13 +24135,13 @@ def test_delete_cluster_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.AlloyDBAdminRestInterceptor, "post_delete_cluster" + transports.AlloyDBAdminRestInterceptor, "post_promote_cluster" ) as post, mock.patch.object( - transports.AlloyDBAdminRestInterceptor, "pre_delete_cluster" + transports.AlloyDBAdminRestInterceptor, "pre_promote_cluster" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = service.DeleteClusterRequest.pb(service.DeleteClusterRequest()) + pb_message = service.PromoteClusterRequest.pb(service.PromoteClusterRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -22178,7 +24154,7 @@ def test_delete_cluster_rest_interceptors(null_interceptor): return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.content = return_value - request = service.DeleteClusterRequest() + request = service.PromoteClusterRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -22186,7 +24162,7 @@ def test_delete_cluster_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.delete_cluster( + client.promote_cluster( request, metadata=[ ("key", "val"), @@ -22198,7 +24174,9 @@ def test_delete_cluster_rest_interceptors(null_interceptor): post.assert_called_once() -def test_promote_cluster_rest_bad_request(request_type=service.PromoteClusterRequest): +def test_switchover_cluster_rest_bad_request( + 
request_type=service.SwitchoverClusterRequest, +): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -22217,17 +24195,17 @@ def test_promote_cluster_rest_bad_request(request_type=service.PromoteClusterReq response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value - client.promote_cluster(request) + client.switchover_cluster(request) @pytest.mark.parametrize( "request_type", [ - service.PromoteClusterRequest, + service.SwitchoverClusterRequest, dict, ], ) -def test_promote_cluster_rest_call_success(request_type): +def test_switchover_cluster_rest_call_success(request_type): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -22247,14 +24225,14 @@ def test_promote_cluster_rest_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.promote_cluster(request) + response = client.switchover_cluster(request) # Establish that the response is the type that we expect. 
json_return_value = json_format.MessageToJson(return_value) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_promote_cluster_rest_interceptors(null_interceptor): +def test_switchover_cluster_rest_interceptors(null_interceptor): transport = transports.AlloyDBAdminRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -22270,13 +24248,15 @@ def test_promote_cluster_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.AlloyDBAdminRestInterceptor, "post_promote_cluster" + transports.AlloyDBAdminRestInterceptor, "post_switchover_cluster" ) as post, mock.patch.object( - transports.AlloyDBAdminRestInterceptor, "pre_promote_cluster" + transports.AlloyDBAdminRestInterceptor, "pre_switchover_cluster" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = service.PromoteClusterRequest.pb(service.PromoteClusterRequest()) + pb_message = service.SwitchoverClusterRequest.pb( + service.SwitchoverClusterRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -22289,7 +24269,7 @@ def test_promote_cluster_rest_interceptors(null_interceptor): return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.content = return_value - request = service.PromoteClusterRequest() + request = service.SwitchoverClusterRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -22297,7 +24277,7 @@ def test_promote_cluster_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.promote_cluster( + client.switchover_cluster( request, metadata=[ ("key", "val"), @@ -22468,6 +24448,11 @@ def test_create_secondary_cluster_rest_call_success(request_type): "reference_id": "reference_id_value", "source_type": 1, }, + "cloudsql_backup_run_source": { + "project": "project_value", + "instance_id": 
"instance_id_value", + "backup_run_id": 1366, + }, "name": "name_value", "display_name": "display_name_value", "uid": "uid_value", @@ -22530,7 +24515,20 @@ def test_create_secondary_cluster_rest_call_success(request_type): }, "satisfies_pzi": True, "satisfies_pzs": True, - "psc_config": {"psc_enabled": True}, + "psc_config": {"psc_enabled": True, "service_owned_project_number": 2987}, + "maintenance_update_policy": { + "maintenance_windows": [{"day": 1, "start_time": {}}] + }, + "maintenance_schedule": {"start_time": {}}, + "gemini_config": {"entitled": True}, + "subscription_type": 1, + "trial_metadata": { + "start_time": {}, + "end_time": {}, + "upgrade_time": {}, + "grace_end_time": {}, + }, + "tags": {}, } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency @@ -22854,6 +24852,7 @@ def test_get_instance_rest_call_success(request_type): etag="etag_value", satisfies_pzi=True, satisfies_pzs=True, + outbound_public_ip_addresses=["outbound_public_ip_addresses_value"], ) # Wrap the value into a proper Response obj @@ -22882,6 +24881,9 @@ def test_get_instance_rest_call_success(request_type): assert response.etag == "etag_value" assert response.satisfies_pzi is True assert response.satisfies_pzs is True + assert response.outbound_public_ip_addresses == [ + "outbound_public_ip_addresses_value" + ] @pytest.mark.parametrize("null_interceptor", [True, False]) @@ -23001,6 +25003,17 @@ def test_create_instance_rest_call_success(request_type): "query_string_length": 2061, "query_plans_per_minute": 2378, }, + "observability_config": { + "enabled": True, + "preserve_comments": True, + "track_wait_events": True, + "track_wait_event_types": True, + "max_query_string_length": 2482, + "record_application_tags": True, + "query_plans_per_minute": 2378, + "track_active_queries": True, + "track_client_address": True, + }, "read_pool_config": {"node_count": 
1070}, "ip_address": "ip_address_value", "public_ip_address": "public_ip_address_value", @@ -23020,29 +25033,21 @@ def test_create_instance_rest_call_success(request_type): "allowed_consumer_projects_value1", "allowed_consumer_projects_value2", ], - "allowed_consumer_networks": [ - "allowed_consumer_networks_value1", - "allowed_consumer_networks_value2", - ], + "psc_dns_name": "psc_dns_name_value", "psc_interface_configs": [ - { - "consumer_endpoint_ips": [ - "consumer_endpoint_ips_value1", - "consumer_endpoint_ips_value2", - ], - "network_attachment": "network_attachment_value", - } + {"network_attachment_resource": "network_attachment_resource_value"} ], - "outgoing_service_attachment_links": [ - "outgoing_service_attachment_links_value1", - "outgoing_service_attachment_links_value2", - ], - "psc_enabled": True, }, "network_config": { "authorized_external_networks": [{"cidr_range": "cidr_range_value"}], "enable_public_ip": True, + "enable_outbound_public_ip": True, }, + "gemini_config": {"entitled": True}, + "outbound_public_ip_addresses": [ + "outbound_public_ip_addresses_value1", + "outbound_public_ip_addresses_value2", + ], } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency @@ -23251,6 +25256,17 @@ def test_create_secondary_instance_rest_call_success(request_type): "query_string_length": 2061, "query_plans_per_minute": 2378, }, + "observability_config": { + "enabled": True, + "preserve_comments": True, + "track_wait_events": True, + "track_wait_event_types": True, + "max_query_string_length": 2482, + "record_application_tags": True, + "query_plans_per_minute": 2378, + "track_active_queries": True, + "track_client_address": True, + }, "read_pool_config": {"node_count": 1070}, "ip_address": "ip_address_value", "public_ip_address": "public_ip_address_value", @@ -23270,29 +25286,21 @@ def test_create_secondary_instance_rest_call_success(request_type): "allowed_consumer_projects_value1", "allowed_consumer_projects_value2", ], - "allowed_consumer_networks": [ - "allowed_consumer_networks_value1", - "allowed_consumer_networks_value2", - ], + "psc_dns_name": "psc_dns_name_value", "psc_interface_configs": [ - { - "consumer_endpoint_ips": [ - "consumer_endpoint_ips_value1", - "consumer_endpoint_ips_value2", - ], - "network_attachment": "network_attachment_value", - } - ], - "outgoing_service_attachment_links": [ - "outgoing_service_attachment_links_value1", - "outgoing_service_attachment_links_value2", + {"network_attachment_resource": "network_attachment_resource_value"} ], - "psc_enabled": True, }, "network_config": { "authorized_external_networks": [{"cidr_range": "cidr_range_value"}], "enable_public_ip": True, + "enable_outbound_public_ip": True, }, + "gemini_config": {"entitled": True}, + "outbound_public_ip_addresses": [ + "outbound_public_ip_addresses_value1", + "outbound_public_ip_addresses_value2", + ], } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency @@ -23508,6 +25516,17 @@ def test_batch_create_instances_rest_call_success(request_type): "query_string_length": 2061, "query_plans_per_minute": 2378, }, + "observability_config": { + "enabled": True, + "preserve_comments": True, + "track_wait_events": True, + "track_wait_event_types": True, + "max_query_string_length": 2482, + "record_application_tags": True, + "query_plans_per_minute": 2378, + "track_active_queries": True, + "track_client_address": True, + }, "read_pool_config": {"node_count": 1070}, "ip_address": "ip_address_value", "public_ip_address": "public_ip_address_value", @@ -23527,31 +25546,25 @@ def test_batch_create_instances_rest_call_success(request_type): "allowed_consumer_projects_value1", "allowed_consumer_projects_value2", ], - "allowed_consumer_networks": [ - "allowed_consumer_networks_value1", - "allowed_consumer_networks_value2", - ], + "psc_dns_name": "psc_dns_name_value", "psc_interface_configs": [ { - "consumer_endpoint_ips": [ - "consumer_endpoint_ips_value1", - "consumer_endpoint_ips_value2", - ], - "network_attachment": "network_attachment_value", + "network_attachment_resource": "network_attachment_resource_value" } ], - "outgoing_service_attachment_links": [ - "outgoing_service_attachment_links_value1", - "outgoing_service_attachment_links_value2", - ], - "psc_enabled": True, }, "network_config": { "authorized_external_networks": [ {"cidr_range": "cidr_range_value"} ], "enable_public_ip": True, + "enable_outbound_public_ip": True, }, + "gemini_config": {"entitled": True}, + "outbound_public_ip_addresses": [ + "outbound_public_ip_addresses_value1", + "outbound_public_ip_addresses_value2", + ], }, "request_id": "request_id_value", "validate_only": True, @@ -23773,6 +25786,17 @@ def test_update_instance_rest_call_success(request_type): "query_string_length": 2061, "query_plans_per_minute": 2378, }, + "observability_config": { + "enabled": True, + 
"preserve_comments": True, + "track_wait_events": True, + "track_wait_event_types": True, + "max_query_string_length": 2482, + "record_application_tags": True, + "query_plans_per_minute": 2378, + "track_active_queries": True, + "track_client_address": True, + }, "read_pool_config": {"node_count": 1070}, "ip_address": "ip_address_value", "public_ip_address": "public_ip_address_value", @@ -23792,29 +25816,21 @@ def test_update_instance_rest_call_success(request_type): "allowed_consumer_projects_value1", "allowed_consumer_projects_value2", ], - "allowed_consumer_networks": [ - "allowed_consumer_networks_value1", - "allowed_consumer_networks_value2", - ], + "psc_dns_name": "psc_dns_name_value", "psc_interface_configs": [ - { - "consumer_endpoint_ips": [ - "consumer_endpoint_ips_value1", - "consumer_endpoint_ips_value2", - ], - "network_attachment": "network_attachment_value", - } + {"network_attachment_resource": "network_attachment_resource_value"} ], - "outgoing_service_attachment_links": [ - "outgoing_service_attachment_links_value1", - "outgoing_service_attachment_links_value2", - ], - "psc_enabled": True, }, "network_config": { "authorized_external_networks": [{"cidr_range": "cidr_range_value"}], "enable_public_ip": True, + "enable_outbound_public_ip": True, }, + "gemini_config": {"entitled": True}, + "outbound_public_ip_addresses": [ + "outbound_public_ip_addresses_value1", + "outbound_public_ip_addresses_value2", + ], } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency @@ -24422,6 +26438,122 @@ def test_restart_instance_rest_interceptors(null_interceptor): post.assert_called_once() +def test_execute_sql_rest_bad_request(request_type=service.ExecuteSqlRequest): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "instance": "projects/sample1/locations/sample2/clusters/sample3/instances/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.execute_sql(request) + + +@pytest.mark.parametrize( + "request_type", + [ + service.ExecuteSqlRequest, + dict, + ], +) +def test_execute_sql_rest_call_success(request_type): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "instance": "projects/sample1/locations/sample2/clusters/sample3/instances/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = service.ExecuteSqlResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = service.ExecuteSqlResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.execute_sql(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, service.ExecuteSqlResponse) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_execute_sql_rest_interceptors(null_interceptor): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AlloyDBAdminRestInterceptor(), + ) + client = AlloyDBAdminClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_execute_sql" + ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "pre_execute_sql" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.ExecuteSqlRequest.pb(service.ExecuteSqlRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = service.ExecuteSqlResponse.to_json(service.ExecuteSqlResponse()) + req.return_value.content = return_value + + request = service.ExecuteSqlRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = service.ExecuteSqlResponse() + + client.execute_sql( + request, + metadata=[ + ("key", 
"val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + def test_list_backups_rest_bad_request(request_type=service.ListBackupsRequest): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" @@ -24745,6 +26877,7 @@ def test_create_backup_rest_call_success(request_type): "satisfies_pzi": True, "satisfies_pzs": True, "database_version": 1, + "tags": {}, } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency @@ -24955,6 +27088,7 @@ def test_update_backup_rest_call_success(request_type): "satisfies_pzi": True, "satisfies_pzs": True, "database_version": 1, + "tags": {}, } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency @@ -25755,6 +27889,7 @@ def test_get_user_rest_call_success(request_type): password="password_value", database_roles=["database_roles_value"], user_type=resources.User.UserType.ALLOYDB_BUILT_IN, + keep_extra_roles=True, ) # Wrap the value into a proper Response obj @@ -25774,6 +27909,7 @@ def test_get_user_rest_call_success(request_type): assert response.password == "password_value" assert response.database_roles == ["database_roles_value"] assert response.user_type == resources.User.UserType.ALLOYDB_BUILT_IN + assert response.keep_extra_roles is True @pytest.mark.parametrize("null_interceptor", [True, False]) @@ -25871,6 +28007,7 @@ def test_create_user_rest_call_success(request_type): "password": "password_value", "database_roles": ["database_roles_value1", "database_roles_value2"], "user_type": 1, + "keep_extra_roles": True, } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency @@ -25949,6 +28086,7 @@ def get_message_fields(field): password="password_value", database_roles=["database_roles_value"], user_type=resources.User.UserType.ALLOYDB_BUILT_IN, + keep_extra_roles=True, ) # Wrap the value into a proper Response obj @@ -25968,6 +28106,7 @@ def get_message_fields(field): assert response.password == "password_value" assert response.database_roles == ["database_roles_value"] assert response.user_type == resources.User.UserType.ALLOYDB_BUILT_IN + assert response.keep_extra_roles is True @pytest.mark.parametrize("null_interceptor", [True, False]) @@ -26073,6 +28212,7 @@ def test_update_user_rest_call_success(request_type): "password": "password_value", "database_roles": ["database_roles_value1", "database_roles_value2"], "user_type": 1, + "keep_extra_roles": True, } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency @@ -26151,6 +28291,7 @@ def get_message_fields(field): password="password_value", database_roles=["database_roles_value"], user_type=resources.User.UserType.ALLOYDB_BUILT_IN, + keep_extra_roles=True, ) # Wrap the value into a proper Response obj @@ -26170,6 +28311,7 @@ def get_message_fields(field): assert response.password == "password_value" assert response.database_roles == ["database_roles_value"] assert response.user_type == resources.User.UserType.ALLOYDB_BUILT_IN + assert response.keep_extra_roles is True @pytest.mark.parametrize("null_interceptor", [True, False]) @@ -26892,6 +29034,26 @@ def test_update_cluster_empty_call_rest(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_upgrade_cluster_empty_call_rest(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.upgrade_cluster), "__call__") as call: + client.upgrade_cluster(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.UpgradeClusterRequest() + + assert args[0] == request_msg + + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. def test_delete_cluster_empty_call_rest(): @@ -26932,6 +29094,28 @@ def test_promote_cluster_empty_call_rest(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_switchover_cluster_empty_call_rest(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.switchover_cluster), "__call__" + ) as call: + client.switchover_cluster(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.SwitchoverClusterRequest() + + assert args[0] == request_msg + + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. def test_restore_cluster_empty_call_rest(): @@ -27180,6 +29364,26 @@ def test_restart_instance_empty_call_rest(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_execute_sql_empty_call_rest(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.execute_sql), "__call__") as call: + client.execute_sql(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.ExecuteSqlRequest() + + assert args[0] == request_msg + + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. def test_list_backups_empty_call_rest(): @@ -27520,8 +29724,10 @@ def test_alloy_db_admin_base_transport(): "get_cluster", "create_cluster", "update_cluster", + "upgrade_cluster", "delete_cluster", "promote_cluster", + "switchover_cluster", "restore_cluster", "create_secondary_cluster", "list_instances", @@ -27534,6 +29740,7 @@ def test_alloy_db_admin_base_transport(): "failover_instance", "inject_fault", "restart_instance", + "execute_sql", "list_backups", "get_backup", "create_backup", @@ -27826,12 +30033,18 @@ def test_alloy_db_admin_client_transport_session_collision(transport_name): session1 = client1.transport.update_cluster._session session2 = client2.transport.update_cluster._session assert session1 != session2 + session1 = client1.transport.upgrade_cluster._session + session2 = client2.transport.upgrade_cluster._session + assert session1 != session2 session1 = client1.transport.delete_cluster._session session2 = client2.transport.delete_cluster._session assert session1 != session2 session1 = client1.transport.promote_cluster._session session2 = client2.transport.promote_cluster._session assert session1 != session2 + session1 = client1.transport.switchover_cluster._session + session2 = client2.transport.switchover_cluster._session + assert session1 != session2 session1 = client1.transport.restore_cluster._session 
session2 = client2.transport.restore_cluster._session assert session1 != session2 @@ -27868,6 +30081,9 @@ def test_alloy_db_admin_client_transport_session_collision(transport_name): session1 = client1.transport.restart_instance._session session2 = client2.transport.restart_instance._session assert session1 != session2 + session1 = client1.transport.execute_sql._session + session2 = client2.transport.execute_sql._session + assert session1 != session2 session1 = client1.transport.list_backups._session session2 = client2.transport.list_backups._session assert session1 != session2 diff --git a/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1beta/test_alloy_db_admin.py b/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1beta/test_alloy_db_admin.py index ad8be746aa06..3083343db32a 100644 --- a/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1beta/test_alloy_db_admin.py +++ b/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1beta/test_alloy_db_admin.py @@ -78,7 +78,13 @@ pagers, transports, ) -from google.cloud.alloydb_v1beta.types import resources, service +from google.cloud.alloydb_v1beta.types import ( + csql_resources, + data_model, + gemini, + resources, + service, +) async def mock_async_gen(data, chunk_size=1): @@ -323,86 +329,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (AlloyDBAdminClient, transports.AlloyDBAdminGrpcTransport, "grpc"), - (AlloyDBAdminClient, transports.AlloyDBAdminRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. 
- assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. 
- # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ @@ -1695,6 +1621,7 @@ def test_get_cluster(request_type, transport: str = "grpc"): etag="etag_value", reconciling=True, satisfies_pzs=True, + subscription_type=resources.SubscriptionType.STANDARD, ) response = client.get_cluster(request) @@ -1716,6 +1643,7 @@ def test_get_cluster(request_type, transport: str = "grpc"): assert response.etag == "etag_value" assert response.reconciling is True assert response.satisfies_pzs is True + assert response.subscription_type == resources.SubscriptionType.STANDARD def test_get_cluster_non_empty_request_with_auto_populated_field(): @@ -1851,6 +1779,7 @@ async def test_get_cluster_async( etag="etag_value", reconciling=True, satisfies_pzs=True, + subscription_type=resources.SubscriptionType.STANDARD, ) ) response = await client.get_cluster(request) @@ -1873,6 +1802,7 @@ async def test_get_cluster_async( assert response.etag == "etag_value" assert response.reconciling is True assert 
response.satisfies_pzs is True + assert response.subscription_type == resources.SubscriptionType.STANDARD @pytest.mark.asyncio @@ -2734,11 +2664,11 @@ async def test_update_cluster_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - service.DeleteClusterRequest, + service.UpgradeClusterRequest, dict, ], ) -def test_delete_cluster(request_type, transport: str = "grpc"): +def test_upgrade_cluster(request_type, transport: str = "grpc"): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -2749,22 +2679,22 @@ def test_delete_cluster(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_cluster), "__call__") as call: + with mock.patch.object(type(client.transport.upgrade_cluster), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.delete_cluster(request) + response = client.upgrade_cluster(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = service.DeleteClusterRequest() + request = service.UpgradeClusterRequest() assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) -def test_delete_cluster_non_empty_request_with_auto_populated_field(): +def test_upgrade_cluster_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. 
client = AlloyDBAdminClient( @@ -2775,28 +2705,26 @@ def test_delete_cluster_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = service.DeleteClusterRequest( + request = service.UpgradeClusterRequest( name="name_value", - request_id="request_id_value", etag="etag_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_cluster), "__call__") as call: + with mock.patch.object(type(client.transport.upgrade_cluster), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.delete_cluster(request=request) + client.upgrade_cluster(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.DeleteClusterRequest( + assert args[0] == service.UpgradeClusterRequest( name="name_value", - request_id="request_id_value", etag="etag_value", ) -def test_delete_cluster_use_cached_wrapped_rpc(): +def test_upgrade_cluster_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -2810,16 +2738,16 @@ def test_delete_cluster_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.delete_cluster in client._transport._wrapped_methods + assert client._transport.upgrade_cluster in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.delete_cluster] = mock_rpc + client._transport._wrapped_methods[client._transport.upgrade_cluster] = mock_rpc request = {} - client.delete_cluster(request) + client.upgrade_cluster(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 @@ -2829,7 +2757,7 @@ def test_delete_cluster_use_cached_wrapped_rpc(): # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.delete_cluster(request) + client.upgrade_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -2837,7 +2765,7 @@ def test_delete_cluster_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_delete_cluster_async_use_cached_wrapped_rpc( +async def test_upgrade_cluster_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -2854,7 +2782,7 @@ async def test_delete_cluster_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.delete_cluster + client._client._transport.upgrade_cluster in client._client._transport._wrapped_methods ) @@ -2862,11 +2790,11 @@ async def test_delete_cluster_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.delete_cluster + client._client._transport.upgrade_cluster ] = mock_rpc request = {} - await client.delete_cluster(request) + await client.upgrade_cluster(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 @@ -2876,7 +2804,7 @@ async def test_delete_cluster_async_use_cached_wrapped_rpc( # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - await client.delete_cluster(request) + await client.upgrade_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -2884,8 +2812,8 @@ async def test_delete_cluster_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_delete_cluster_async( - transport: str = "grpc_asyncio", request_type=service.DeleteClusterRequest +async def test_upgrade_cluster_async( + transport: str = "grpc_asyncio", request_type=service.UpgradeClusterRequest ): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), @@ -2897,17 +2825,17 @@ async def test_delete_cluster_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_cluster), "__call__") as call: + with mock.patch.object(type(client.transport.upgrade_cluster), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.delete_cluster(request) + response = await client.upgrade_cluster(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = service.DeleteClusterRequest() + request = service.UpgradeClusterRequest() assert args[0] == request # Establish that the response is the type that we expect. 
@@ -2915,25 +2843,25 @@ async def test_delete_cluster_async( @pytest.mark.asyncio -async def test_delete_cluster_async_from_dict(): - await test_delete_cluster_async(request_type=dict) +async def test_upgrade_cluster_async_from_dict(): + await test_upgrade_cluster_async(request_type=dict) -def test_delete_cluster_field_headers(): +def test_upgrade_cluster_field_headers(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.DeleteClusterRequest() + request = service.UpgradeClusterRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_cluster), "__call__") as call: + with mock.patch.object(type(client.transport.upgrade_cluster), "__call__") as call: call.return_value = operations_pb2.Operation(name="operations/op") - client.delete_cluster(request) + client.upgrade_cluster(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -2949,23 +2877,23 @@ def test_delete_cluster_field_headers(): @pytest.mark.asyncio -async def test_delete_cluster_field_headers_async(): +async def test_upgrade_cluster_field_headers_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.DeleteClusterRequest() + request = service.UpgradeClusterRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.delete_cluster), "__call__") as call: + with mock.patch.object(type(client.transport.upgrade_cluster), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) - await client.delete_cluster(request) + await client.upgrade_cluster(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -2980,19 +2908,20 @@ async def test_delete_cluster_field_headers_async(): ) in kw["metadata"] -def test_delete_cluster_flattened(): +def test_upgrade_cluster_flattened(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_cluster), "__call__") as call: + with mock.patch.object(type(client.transport.upgrade_cluster), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.delete_cluster( + client.upgrade_cluster( name="name_value", + version=resources.DatabaseVersion.POSTGRES_13, ) # Establish that the underlying call was made with the expected @@ -3002,9 +2931,12 @@ def test_delete_cluster_flattened(): arg = args[0].name mock_val = "name_value" assert arg == mock_val + arg = args[0].version + mock_val = resources.DatabaseVersion.POSTGRES_13 + assert arg == mock_val -def test_delete_cluster_flattened_error(): +def test_upgrade_cluster_flattened_error(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -3012,20 +2944,21 @@ def test_delete_cluster_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.delete_cluster( - service.DeleteClusterRequest(), + client.upgrade_cluster( + service.UpgradeClusterRequest(), name="name_value", + version=resources.DatabaseVersion.POSTGRES_13, ) @pytest.mark.asyncio -async def test_delete_cluster_flattened_async(): +async def test_upgrade_cluster_flattened_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_cluster), "__call__") as call: + with mock.patch.object(type(client.transport.upgrade_cluster), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") @@ -3034,8 +2967,9 @@ async def test_delete_cluster_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.delete_cluster( + response = await client.upgrade_cluster( name="name_value", + version=resources.DatabaseVersion.POSTGRES_13, ) # Establish that the underlying call was made with the expected @@ -3045,10 +2979,13 @@ async def test_delete_cluster_flattened_async(): arg = args[0].name mock_val = "name_value" assert arg == mock_val + arg = args[0].version + mock_val = resources.DatabaseVersion.POSTGRES_13 + assert arg == mock_val @pytest.mark.asyncio -async def test_delete_cluster_flattened_error_async(): +async def test_upgrade_cluster_flattened_error_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) @@ -3056,20 +2993,21 @@ async def test_delete_cluster_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.delete_cluster( - service.DeleteClusterRequest(), + await client.upgrade_cluster( + service.UpgradeClusterRequest(), name="name_value", + version=resources.DatabaseVersion.POSTGRES_13, ) @pytest.mark.parametrize( "request_type", [ - service.PromoteClusterRequest, + service.DeleteClusterRequest, dict, ], ) -def test_promote_cluster(request_type, transport: str = "grpc"): +def test_delete_cluster(request_type, transport: str = "grpc"): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -3080,22 +3018,22 @@ def test_promote_cluster(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.promote_cluster), "__call__") as call: + with mock.patch.object(type(client.transport.delete_cluster), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.promote_cluster(request) + response = client.delete_cluster(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = service.PromoteClusterRequest() + request = service.DeleteClusterRequest() assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) -def test_promote_cluster_non_empty_request_with_auto_populated_field(): +def test_delete_cluster_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. 
client = AlloyDBAdminClient( @@ -3106,28 +3044,28 @@ def test_promote_cluster_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = service.PromoteClusterRequest( + request = service.DeleteClusterRequest( name="name_value", request_id="request_id_value", etag="etag_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.promote_cluster), "__call__") as call: + with mock.patch.object(type(client.transport.delete_cluster), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.promote_cluster(request=request) + client.delete_cluster(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.PromoteClusterRequest( + assert args[0] == service.DeleteClusterRequest( name="name_value", request_id="request_id_value", etag="etag_value", ) -def test_promote_cluster_use_cached_wrapped_rpc(): +def test_delete_cluster_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -3141,16 +3079,16 @@ def test_promote_cluster_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.promote_cluster in client._transport._wrapped_methods + assert client._transport.delete_cluster in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.promote_cluster] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_cluster] = mock_rpc request = {} - client.promote_cluster(request) + client.delete_cluster(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 @@ -3160,7 +3098,7 @@ def test_promote_cluster_use_cached_wrapped_rpc(): # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.promote_cluster(request) + client.delete_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -3168,7 +3106,7 @@ def test_promote_cluster_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_promote_cluster_async_use_cached_wrapped_rpc( +async def test_delete_cluster_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -3185,7 +3123,7 @@ async def test_promote_cluster_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.promote_cluster + client._client._transport.delete_cluster in client._client._transport._wrapped_methods ) @@ -3193,11 +3131,11 @@ async def test_promote_cluster_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.promote_cluster + client._client._transport.delete_cluster ] = mock_rpc request = {} - await client.promote_cluster(request) + await client.delete_cluster(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 @@ -3207,7 +3145,7 @@ async def test_promote_cluster_async_use_cached_wrapped_rpc( # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - await client.promote_cluster(request) + await client.delete_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -3215,8 +3153,8 @@ async def test_promote_cluster_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_promote_cluster_async( - transport: str = "grpc_asyncio", request_type=service.PromoteClusterRequest +async def test_delete_cluster_async( + transport: str = "grpc_asyncio", request_type=service.DeleteClusterRequest ): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), @@ -3228,17 +3166,17 @@ async def test_promote_cluster_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.promote_cluster), "__call__") as call: + with mock.patch.object(type(client.transport.delete_cluster), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.promote_cluster(request) + response = await client.delete_cluster(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = service.PromoteClusterRequest() + request = service.DeleteClusterRequest() assert args[0] == request # Establish that the response is the type that we expect. 
@@ -3246,25 +3184,25 @@ async def test_promote_cluster_async( @pytest.mark.asyncio -async def test_promote_cluster_async_from_dict(): - await test_promote_cluster_async(request_type=dict) +async def test_delete_cluster_async_from_dict(): + await test_delete_cluster_async(request_type=dict) -def test_promote_cluster_field_headers(): +def test_delete_cluster_field_headers(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.PromoteClusterRequest() + request = service.DeleteClusterRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.promote_cluster), "__call__") as call: + with mock.patch.object(type(client.transport.delete_cluster), "__call__") as call: call.return_value = operations_pb2.Operation(name="operations/op") - client.promote_cluster(request) + client.delete_cluster(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -3280,23 +3218,23 @@ def test_promote_cluster_field_headers(): @pytest.mark.asyncio -async def test_promote_cluster_field_headers_async(): +async def test_delete_cluster_field_headers_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.PromoteClusterRequest() + request = service.DeleteClusterRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.promote_cluster), "__call__") as call: + with mock.patch.object(type(client.transport.delete_cluster), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) - await client.promote_cluster(request) + await client.delete_cluster(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -3311,18 +3249,18 @@ async def test_promote_cluster_field_headers_async(): ) in kw["metadata"] -def test_promote_cluster_flattened(): +def test_delete_cluster_flattened(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.promote_cluster), "__call__") as call: + with mock.patch.object(type(client.transport.delete_cluster), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.promote_cluster( + client.delete_cluster( name="name_value", ) @@ -3335,7 +3273,7 @@ def test_promote_cluster_flattened(): assert arg == mock_val -def test_promote_cluster_flattened_error(): +def test_delete_cluster_flattened_error(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -3343,20 +3281,20 @@ def test_promote_cluster_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.promote_cluster( - service.PromoteClusterRequest(), + client.delete_cluster( + service.DeleteClusterRequest(), name="name_value", ) @pytest.mark.asyncio -async def test_promote_cluster_flattened_async(): +async def test_delete_cluster_flattened_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.promote_cluster), "__call__") as call: + with mock.patch.object(type(client.transport.delete_cluster), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") @@ -3365,7 +3303,7 @@ async def test_promote_cluster_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.promote_cluster( + response = await client.delete_cluster( name="name_value", ) @@ -3379,7 +3317,7 @@ async def test_promote_cluster_flattened_async(): @pytest.mark.asyncio -async def test_promote_cluster_flattened_error_async(): +async def test_delete_cluster_flattened_error_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) @@ -3387,8 +3325,8 @@ async def test_promote_cluster_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.promote_cluster( - service.PromoteClusterRequest(), + await client.delete_cluster( + service.DeleteClusterRequest(), name="name_value", ) @@ -3396,11 +3334,11 @@ async def test_promote_cluster_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - service.RestoreClusterRequest, + service.PromoteClusterRequest, dict, ], ) -def test_restore_cluster(request_type, transport: str = "grpc"): +def test_promote_cluster(request_type, transport: str = "grpc"): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -3411,22 +3349,22 @@ def test_restore_cluster(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.restore_cluster), "__call__") as call: + with mock.patch.object(type(client.transport.promote_cluster), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.restore_cluster(request) + response = client.promote_cluster(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = service.RestoreClusterRequest() + request = service.PromoteClusterRequest() assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) -def test_restore_cluster_non_empty_request_with_auto_populated_field(): +def test_promote_cluster_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. 
client = AlloyDBAdminClient( @@ -3437,28 +3375,28 @@ def test_restore_cluster_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = service.RestoreClusterRequest( - parent="parent_value", - cluster_id="cluster_id_value", + request = service.PromoteClusterRequest( + name="name_value", request_id="request_id_value", + etag="etag_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.restore_cluster), "__call__") as call: + with mock.patch.object(type(client.transport.promote_cluster), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.restore_cluster(request=request) + client.promote_cluster(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.RestoreClusterRequest( - parent="parent_value", - cluster_id="cluster_id_value", + assert args[0] == service.PromoteClusterRequest( + name="name_value", request_id="request_id_value", + etag="etag_value", ) -def test_restore_cluster_use_cached_wrapped_rpc(): +def test_promote_cluster_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -3472,16 +3410,16 @@ def test_restore_cluster_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.restore_cluster in client._transport._wrapped_methods + assert client._transport.promote_cluster in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.restore_cluster] = mock_rpc + client._transport._wrapped_methods[client._transport.promote_cluster] = mock_rpc request = {} - client.restore_cluster(request) + client.promote_cluster(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 @@ -3491,7 +3429,7 @@ def test_restore_cluster_use_cached_wrapped_rpc(): # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.restore_cluster(request) + client.promote_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -3499,7 +3437,7 @@ def test_restore_cluster_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_restore_cluster_async_use_cached_wrapped_rpc( +async def test_promote_cluster_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -3516,7 +3454,7 @@ async def test_restore_cluster_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.restore_cluster + client._client._transport.promote_cluster in client._client._transport._wrapped_methods ) @@ -3524,11 +3462,11 @@ async def test_restore_cluster_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.restore_cluster + client._client._transport.promote_cluster ] = mock_rpc request = {} - await client.restore_cluster(request) + await client.promote_cluster(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 @@ -3538,7 +3476,7 @@ async def test_restore_cluster_async_use_cached_wrapped_rpc( # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - await client.restore_cluster(request) + await client.promote_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -3546,8 +3484,8 @@ async def test_restore_cluster_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_restore_cluster_async( - transport: str = "grpc_asyncio", request_type=service.RestoreClusterRequest +async def test_promote_cluster_async( + transport: str = "grpc_asyncio", request_type=service.PromoteClusterRequest ): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), @@ -3559,17 +3497,17 @@ async def test_restore_cluster_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.restore_cluster), "__call__") as call: + with mock.patch.object(type(client.transport.promote_cluster), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.restore_cluster(request) + response = await client.promote_cluster(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = service.RestoreClusterRequest() + request = service.PromoteClusterRequest() assert args[0] == request # Establish that the response is the type that we expect. 
@@ -3577,25 +3515,25 @@ async def test_restore_cluster_async( @pytest.mark.asyncio -async def test_restore_cluster_async_from_dict(): - await test_restore_cluster_async(request_type=dict) +async def test_promote_cluster_async_from_dict(): + await test_promote_cluster_async(request_type=dict) -def test_restore_cluster_field_headers(): +def test_promote_cluster_field_headers(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.RestoreClusterRequest() + request = service.PromoteClusterRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.restore_cluster), "__call__") as call: + with mock.patch.object(type(client.transport.promote_cluster), "__call__") as call: call.return_value = operations_pb2.Operation(name="operations/op") - client.restore_cluster(request) + client.promote_cluster(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -3606,28 +3544,28 @@ def test_restore_cluster_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_restore_cluster_field_headers_async(): +async def test_promote_cluster_field_headers_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.RestoreClusterRequest() + request = service.PromoteClusterRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.restore_cluster), "__call__") as call: + with mock.patch.object(type(client.transport.promote_cluster), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) - await client.restore_cluster(request) + await client.promote_cluster(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -3638,18 +3576,100 @@ async def test_restore_cluster_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] +def test_promote_cluster_flattened(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.promote_cluster), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.promote_cluster( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_promote_cluster_flattened_error(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.promote_cluster( + service.PromoteClusterRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_promote_cluster_flattened_async(): + client = AlloyDBAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.promote_cluster), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.promote_cluster( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_promote_cluster_flattened_error_async(): + client = AlloyDBAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.promote_cluster( + service.PromoteClusterRequest(), + name="name_value", + ) + + @pytest.mark.parametrize( "request_type", [ - service.CreateSecondaryClusterRequest, + service.SwitchoverClusterRequest, dict, ], ) -def test_create_secondary_cluster(request_type, transport: str = "grpc"): +def test_switchover_cluster(request_type, transport: str = "grpc"): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -3661,23 +3681,23 @@ def test_create_secondary_cluster(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_secondary_cluster), "__call__" + type(client.transport.switchover_cluster), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.create_secondary_cluster(request) + response = client.switchover_cluster(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = service.CreateSecondaryClusterRequest() + request = service.SwitchoverClusterRequest() assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) -def test_create_secondary_cluster_non_empty_request_with_auto_populated_field(): +def test_switchover_cluster_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. 
client = AlloyDBAdminClient( @@ -3688,30 +3708,28 @@ def test_create_secondary_cluster_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = service.CreateSecondaryClusterRequest( - parent="parent_value", - cluster_id="cluster_id_value", + request = service.SwitchoverClusterRequest( + name="name_value", request_id="request_id_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_secondary_cluster), "__call__" + type(client.transport.switchover_cluster), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.create_secondary_cluster(request=request) + client.switchover_cluster(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.CreateSecondaryClusterRequest( - parent="parent_value", - cluster_id="cluster_id_value", + assert args[0] == service.SwitchoverClusterRequest( + name="name_value", request_id="request_id_value", ) -def test_create_secondary_cluster_use_cached_wrapped_rpc(): +def test_switchover_cluster_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -3726,8 +3744,7 @@ def test_create_secondary_cluster_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.create_secondary_cluster - in client._transport._wrapped_methods + client._transport.switchover_cluster in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -3736,10 +3753,10 @@ def test_create_secondary_cluster_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) 
expect a string. ) client._transport._wrapped_methods[ - client._transport.create_secondary_cluster + client._transport.switchover_cluster ] = mock_rpc request = {} - client.create_secondary_cluster(request) + client.switchover_cluster(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 @@ -3749,7 +3766,7 @@ def test_create_secondary_cluster_use_cached_wrapped_rpc(): # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.create_secondary_cluster(request) + client.switchover_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -3757,7 +3774,7 @@ def test_create_secondary_cluster_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_create_secondary_cluster_async_use_cached_wrapped_rpc( +async def test_switchover_cluster_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -3774,7 +3791,7 @@ async def test_create_secondary_cluster_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.create_secondary_cluster + client._client._transport.switchover_cluster in client._client._transport._wrapped_methods ) @@ -3782,11 +3799,11 @@ async def test_create_secondary_cluster_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.create_secondary_cluster + client._client._transport.switchover_cluster ] = mock_rpc request = {} - await client.create_secondary_cluster(request) + await client.switchover_cluster(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 @@ -3796,7 +3813,7 @@ async def test_create_secondary_cluster_async_use_cached_wrapped_rpc( # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - await client.create_secondary_cluster(request) + await client.switchover_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -3804,8 +3821,8 @@ async def test_create_secondary_cluster_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_create_secondary_cluster_async( - transport: str = "grpc_asyncio", request_type=service.CreateSecondaryClusterRequest +async def test_switchover_cluster_async( + transport: str = "grpc_asyncio", request_type=service.SwitchoverClusterRequest ): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), @@ -3818,18 +3835,18 @@ async def test_create_secondary_cluster_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_secondary_cluster), "__call__" + type(client.transport.switchover_cluster), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.create_secondary_cluster(request) + response = await client.switchover_cluster(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = service.CreateSecondaryClusterRequest() + request = service.SwitchoverClusterRequest() assert args[0] == request # Establish that the response is the type that we expect. 
@@ -3837,27 +3854,27 @@ async def test_create_secondary_cluster_async( @pytest.mark.asyncio -async def test_create_secondary_cluster_async_from_dict(): - await test_create_secondary_cluster_async(request_type=dict) +async def test_switchover_cluster_async_from_dict(): + await test_switchover_cluster_async(request_type=dict) -def test_create_secondary_cluster_field_headers(): +def test_switchover_cluster_field_headers(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.CreateSecondaryClusterRequest() + request = service.SwitchoverClusterRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_secondary_cluster), "__call__" + type(client.transport.switchover_cluster), "__call__" ) as call: call.return_value = operations_pb2.Operation(name="operations/op") - client.create_secondary_cluster(request) + client.switchover_cluster(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -3868,30 +3885,30 @@ def test_create_secondary_cluster_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_create_secondary_cluster_field_headers_async(): +async def test_switchover_cluster_field_headers_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = service.CreateSecondaryClusterRequest() + request = service.SwitchoverClusterRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_secondary_cluster), "__call__" + type(client.transport.switchover_cluster), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) - await client.create_secondary_cluster(request) + await client.switchover_cluster(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -3902,49 +3919,37 @@ async def test_create_secondary_cluster_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] -def test_create_secondary_cluster_flattened(): +def test_switchover_cluster_flattened(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_secondary_cluster), "__call__" + type(client.transport.switchover_cluster), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.create_secondary_cluster( - parent="parent_value", - cluster=resources.Cluster( - backup_source=resources.BackupSource(backup_uid="backup_uid_value") - ), - cluster_id="cluster_id_value", + client.switchover_cluster( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].cluster - mock_val = resources.Cluster( - backup_source=resources.BackupSource(backup_uid="backup_uid_value") - ) - assert arg == mock_val - arg = args[0].cluster_id - mock_val = "cluster_id_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val -def test_create_secondary_cluster_flattened_error(): +def test_switchover_cluster_flattened_error(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -3952,25 +3957,21 @@ def test_create_secondary_cluster_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_secondary_cluster( - service.CreateSecondaryClusterRequest(), - parent="parent_value", - cluster=resources.Cluster( - backup_source=resources.BackupSource(backup_uid="backup_uid_value") - ), - cluster_id="cluster_id_value", + client.switchover_cluster( + service.SwitchoverClusterRequest(), + name="name_value", ) @pytest.mark.asyncio -async def test_create_secondary_cluster_flattened_async(): +async def test_switchover_cluster_flattened_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_secondary_cluster), "__call__" + type(client.transport.switchover_cluster), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") @@ -3980,33 +3981,21 @@ async def test_create_secondary_cluster_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.create_secondary_cluster( - parent="parent_value", - cluster=resources.Cluster( - backup_source=resources.BackupSource(backup_uid="backup_uid_value") - ), - cluster_id="cluster_id_value", + response = await client.switchover_cluster( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].cluster - mock_val = resources.Cluster( - backup_source=resources.BackupSource(backup_uid="backup_uid_value") - ) - assert arg == mock_val - arg = args[0].cluster_id - mock_val = "cluster_id_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio -async def test_create_secondary_cluster_flattened_error_async(): +async def test_switchover_cluster_flattened_error_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) @@ -4014,24 +4003,20 @@ async def test_create_secondary_cluster_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.create_secondary_cluster( - service.CreateSecondaryClusterRequest(), - parent="parent_value", - cluster=resources.Cluster( - backup_source=resources.BackupSource(backup_uid="backup_uid_value") - ), - cluster_id="cluster_id_value", + await client.switchover_cluster( + service.SwitchoverClusterRequest(), + name="name_value", ) @pytest.mark.parametrize( "request_type", [ - service.ListInstancesRequest, + service.RestoreClusterRequest, dict, ], ) -def test_list_instances(request_type, transport: str = "grpc"): +def test_restore_cluster(request_type, transport: str = "grpc"): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -4042,27 +4027,22 @@ def test_list_instances(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_instances), "__call__") as call: + with mock.patch.object(type(client.transport.restore_cluster), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = service.ListInstancesResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], - ) - response = client.list_instances(request) + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.restore_cluster(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = service.ListInstancesRequest() + request = service.RestoreClusterRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListInstancesPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert isinstance(response, future.Future) -def test_list_instances_non_empty_request_with_auto_populated_field(): +def test_restore_cluster_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AlloyDBAdminClient( @@ -4073,30 +4053,28 @@ def test_list_instances_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = service.ListInstancesRequest( + request = service.RestoreClusterRequest( parent="parent_value", - page_token="page_token_value", - filter="filter_value", - order_by="order_by_value", + cluster_id="cluster_id_value", + request_id="request_id_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_instances), "__call__") as call: + with mock.patch.object(type(client.transport.restore_cluster), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.list_instances(request=request) + client.restore_cluster(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.ListInstancesRequest( + assert args[0] == service.RestoreClusterRequest( parent="parent_value", - page_token="page_token_value", - filter="filter_value", - order_by="order_by_value", + cluster_id="cluster_id_value", + request_id="request_id_value", ) -def test_list_instances_use_cached_wrapped_rpc(): +def test_restore_cluster_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -4110,21 +4088,26 @@ def test_list_instances_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_instances in client._transport._wrapped_methods + assert client._transport.restore_cluster in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.list_instances] = mock_rpc + client._transport._wrapped_methods[client._transport.restore_cluster] = mock_rpc request = {} - client.list_instances(request) + client.restore_cluster(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_instances(request) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.restore_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -4132,7 +4115,7 @@ def test_list_instances_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_list_instances_async_use_cached_wrapped_rpc( +async def test_restore_cluster_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -4149,7 +4132,7 @@ async def test_list_instances_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.list_instances + client._client._transport.restore_cluster in client._client._transport._wrapped_methods ) @@ -4157,16 +4140,21 @@ async def test_list_instances_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.list_instances + client._client._transport.restore_cluster ] = mock_rpc request = {} - await client.list_instances(request) + await client.restore_cluster(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - await client.list_instances(request) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.restore_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -4174,8 +4162,8 @@ async def test_list_instances_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_list_instances_async( - transport: str = "grpc_asyncio", request_type=service.ListInstancesRequest +async def test_restore_cluster_async( + transport: str = "grpc_asyncio", request_type=service.RestoreClusterRequest ): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), @@ -4187,48 +4175,43 @@ async def test_list_instances_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_instances), "__call__") as call: + with mock.patch.object(type(client.transport.restore_cluster), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - service.ListInstancesResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], - ) + operations_pb2.Operation(name="operations/spam") ) - response = await client.list_instances(request) + response = await client.restore_cluster(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = service.ListInstancesRequest() + request = service.RestoreClusterRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListInstancesAsyncPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert isinstance(response, future.Future) @pytest.mark.asyncio -async def test_list_instances_async_from_dict(): - await test_list_instances_async(request_type=dict) +async def test_restore_cluster_async_from_dict(): + await test_restore_cluster_async(request_type=dict) -def test_list_instances_field_headers(): +def test_restore_cluster_field_headers(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.ListInstancesRequest() + request = service.RestoreClusterRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_instances), "__call__") as call: - call.return_value = service.ListInstancesResponse() - client.list_instances(request) + with mock.patch.object(type(client.transport.restore_cluster), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.restore_cluster(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -4244,23 +4227,23 @@ def test_list_instances_field_headers(): @pytest.mark.asyncio -async def test_list_instances_field_headers_async(): +async def test_restore_cluster_field_headers_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.ListInstancesRequest() + request = service.RestoreClusterRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_instances), "__call__") as call: + with mock.patch.object(type(client.transport.restore_cluster), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - service.ListInstancesResponse() + operations_pb2.Operation(name="operations/op") ) - await client.list_instances(request) + await client.restore_cluster(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -4275,290 +4258,396 @@ async def test_list_instances_field_headers_async(): ) in kw["metadata"] -def test_list_instances_flattened(): +@pytest.mark.parametrize( + "request_type", + [ + service.CreateSecondaryClusterRequest, + dict, + ], +) +def test_create_secondary_cluster(request_type, transport: str = "grpc"): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_instances), "__call__") as call: + with mock.patch.object( + type(client.transport.create_secondary_cluster), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = service.ListInstancesResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_instances( - parent="parent_value", - ) + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_secondary_cluster(request) - # Establish that the underlying call was made with the expected - # request object values. + # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val + request = service.CreateSecondaryClusterRequest() + assert args[0] == request + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) -def test_list_instances_flattened_error(): + +def test_create_secondary_cluster_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_instances( - service.ListInstancesRequest(), + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service.CreateSecondaryClusterRequest( + parent="parent_value", + cluster_id="cluster_id_value", + request_id="request_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_secondary_cluster), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.create_secondary_cluster(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.CreateSecondaryClusterRequest( parent="parent_value", + cluster_id="cluster_id_value", + request_id="request_id_value", + ) + + +def test_create_secondary_cluster_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_secondary_cluster + in client._transport._wrapped_methods ) + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_secondary_cluster + ] = mock_rpc + request = {} + client.create_secondary_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_secondary_cluster(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_list_instances_flattened_async(): +async def test_create_secondary_cluster_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = AlloyDBAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_secondary_cluster + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.create_secondary_cluster + ] = mock_rpc + + request = {} + await client.create_secondary_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_secondary_cluster(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_secondary_cluster_async( + transport: str = "grpc_asyncio", request_type=service.CreateSecondaryClusterRequest +): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), + transport=transport, ) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_instances), "__call__") as call: + with mock.patch.object( + type(client.transport.create_secondary_cluster), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = service.ListInstancesResponse() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - service.ListInstancesResponse() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_instances( - parent="parent_value", + operations_pb2.Operation(name="operations/spam") ) + response = await client.create_secondary_cluster(request) - # Establish that the underlying call was made with the expected - # request object values. + # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val + request = service.CreateSecondaryClusterRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) @pytest.mark.asyncio -async def test_list_instances_flattened_error_async(): - client = AlloyDBAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_instances( - service.ListInstancesRequest(), - parent="parent_value", - ) +async def test_create_secondary_cluster_async_from_dict(): + await test_create_secondary_cluster_async(request_type=dict) -def test_list_instances_pager(transport_name: str = "grpc"): +def test_create_secondary_cluster_field_headers(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, ) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.CreateSecondaryClusterRequest() + + request.parent = "parent_value" + # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_instances), "__call__") as call: - # Set the response to a series of pages. 
- call.side_effect = ( - service.ListInstancesResponse( - instances=[ - resources.Instance(), - resources.Instance(), - resources.Instance(), - ], - next_page_token="abc", - ), - service.ListInstancesResponse( - instances=[], - next_page_token="def", - ), - service.ListInstancesResponse( - instances=[ - resources.Instance(), - ], - next_page_token="ghi", - ), - service.ListInstancesResponse( - instances=[ - resources.Instance(), - resources.Instance(), - ], - ), - RuntimeError, - ) + with mock.patch.object( + type(client.transport.create_secondary_cluster), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_secondary_cluster(request) - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_secondary_cluster_field_headers_async(): + client = AlloyDBAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.CreateSecondaryClusterRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_secondary_cluster), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") ) - pager = client.list_instances(request={}, retry=retry, timeout=timeout) + await client.create_secondary_cluster(request) - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, resources.Instance) for i in results) + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] -def test_list_instances_pages(transport_name: str = "grpc"): +def test_create_secondary_cluster_flattened(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_instances), "__call__") as call: - # Set the response to a series of pages. - call.side_effect = ( - service.ListInstancesResponse( - instances=[ - resources.Instance(), - resources.Instance(), - resources.Instance(), - ], - next_page_token="abc", - ), - service.ListInstancesResponse( - instances=[], - next_page_token="def", - ), - service.ListInstancesResponse( - instances=[ - resources.Instance(), - ], - next_page_token="ghi", + with mock.patch.object( + type(client.transport.create_secondary_cluster), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.create_secondary_cluster( + parent="parent_value", + cluster=resources.Cluster( + backup_source=resources.BackupSource(backup_uid="backup_uid_value") ), - service.ListInstancesResponse( - instances=[ - resources.Instance(), - resources.Instance(), - ], + cluster_id="cluster_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].cluster + mock_val = resources.Cluster( + backup_source=resources.BackupSource(backup_uid="backup_uid_value") + ) + assert arg == mock_val + arg = args[0].cluster_id + mock_val = "cluster_id_value" + assert arg == mock_val + + +def test_create_secondary_cluster_flattened_error(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_secondary_cluster( + service.CreateSecondaryClusterRequest(), + parent="parent_value", + cluster=resources.Cluster( + backup_source=resources.BackupSource(backup_uid="backup_uid_value") ), - RuntimeError, + cluster_id="cluster_id_value", ) - pages = list(client.list_instances(request={}).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token @pytest.mark.asyncio -async def test_list_instances_async_pager(): +async def test_create_secondary_cluster_flattened_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_instances), "__call__", new_callable=mock.AsyncMock + type(client.transport.create_secondary_cluster), "__call__" ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - service.ListInstancesResponse( - instances=[ - resources.Instance(), - resources.Instance(), - resources.Instance(), - ], - next_page_token="abc", - ), - service.ListInstancesResponse( - instances=[], - next_page_token="def", - ), - service.ListInstancesResponse( - instances=[ - resources.Instance(), - ], - next_page_token="ghi", - ), - service.ListInstancesResponse( - instances=[ - resources.Instance(), - resources.Instance(), - ], - ), - RuntimeError, + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") ) - async_pager = await client.list_instances( - request={}, + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_secondary_cluster( + parent="parent_value", + cluster=resources.Cluster( + backup_source=resources.BackupSource(backup_uid="backup_uid_value") + ), + cluster_id="cluster_id_value", ) - assert async_pager.next_page_token == "abc" - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - assert len(responses) == 6 - assert all(isinstance(i, resources.Instance) for i in responses) + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].cluster + mock_val = resources.Cluster( + backup_source=resources.BackupSource(backup_uid="backup_uid_value") + ) + assert arg == mock_val + arg = args[0].cluster_id + mock_val = "cluster_id_value" + assert arg == mock_val @pytest.mark.asyncio -async def test_list_instances_async_pages(): +async def test_create_secondary_cluster_flattened_error_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_instances), "__call__", new_callable=mock.AsyncMock - ) as call: - # Set the response to a series of pages. - call.side_effect = ( - service.ListInstancesResponse( - instances=[ - resources.Instance(), - resources.Instance(), - resources.Instance(), - ], - next_page_token="abc", - ), - service.ListInstancesResponse( - instances=[], - next_page_token="def", - ), - service.ListInstancesResponse( - instances=[ - resources.Instance(), - ], - next_page_token="ghi", - ), - service.ListInstancesResponse( - instances=[ - resources.Instance(), - resources.Instance(), - ], + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.create_secondary_cluster( + service.CreateSecondaryClusterRequest(), + parent="parent_value", + cluster=resources.Cluster( + backup_source=resources.BackupSource(backup_uid="backup_uid_value") ), - RuntimeError, + cluster_id="cluster_id_value", ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_instances(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token @pytest.mark.parametrize( "request_type", [ - service.GetInstanceRequest, + service.ListInstancesRequest, dict, ], ) -def test_get_instance(request_type, transport: str = "grpc"): +def test_list_instances(request_type, transport: str = "grpc"): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -4569,51 +4658,27 @@ def test_get_instance(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_instance), "__call__") as call: + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = resources.Instance( - name="name_value", - display_name="display_name_value", - uid="uid_value", - state=resources.Instance.State.READY, - instance_type=resources.Instance.InstanceType.PRIMARY, - availability_type=resources.Instance.AvailabilityType.ZONAL, - gce_zone="gce_zone_value", - ip_address="ip_address_value", - public_ip_address="public_ip_address_value", - reconciling=True, - etag="etag_value", - satisfies_pzs=True, - outbound_public_ip_addresses=["outbound_public_ip_addresses_value"], + call.return_value = service.ListInstancesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) - response = client.get_instance(request) + response = client.list_instances(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = service.GetInstanceRequest() + request = service.ListInstancesRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.Instance) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.uid == "uid_value" - assert response.state == resources.Instance.State.READY - assert response.instance_type == resources.Instance.InstanceType.PRIMARY - assert response.availability_type == resources.Instance.AvailabilityType.ZONAL - assert response.gce_zone == "gce_zone_value" - assert response.ip_address == "ip_address_value" - assert response.public_ip_address == "public_ip_address_value" - assert response.reconciling is True - assert response.etag == "etag_value" - assert response.satisfies_pzs is True - assert response.outbound_public_ip_addresses == [ - "outbound_public_ip_addresses_value" - ] + assert isinstance(response, pagers.ListInstancesPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] -def test_get_instance_non_empty_request_with_auto_populated_field(): +def test_list_instances_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AlloyDBAdminClient( @@ -4624,24 +4689,30 @@ def test_get_instance_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = service.GetInstanceRequest( - name="name_value", + request = service.ListInstancesRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_instance), "__call__") as call: + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.get_instance(request=request) + client.list_instances(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.GetInstanceRequest( - name="name_value", + assert args[0] == service.ListInstancesRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", ) -def test_get_instance_use_cached_wrapped_rpc(): +def test_list_instances_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -4655,21 +4726,21 @@ def test_get_instance_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_instance in client._transport._wrapped_methods + assert client._transport.list_instances in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_instance] = mock_rpc + client._transport._wrapped_methods[client._transport.list_instances] = mock_rpc request = {} - client.get_instance(request) + client.list_instances(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_instance(request) + client.list_instances(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -4677,7 +4748,7 @@ def test_get_instance_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_get_instance_async_use_cached_wrapped_rpc( +async def test_list_instances_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -4694,7 +4765,7 @@ async def test_get_instance_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.get_instance + client._client._transport.list_instances in client._client._transport._wrapped_methods ) @@ -4702,16 +4773,16 @@ async def test_get_instance_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.get_instance + client._client._transport.list_instances ] = mock_rpc request = {} - await client.get_instance(request) + await client.list_instances(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - await client.get_instance(request) + await client.list_instances(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -4719,8 +4790,8 @@ async def test_get_instance_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_get_instance_async( - transport: str = "grpc_asyncio", request_type=service.GetInstanceRequest +async def test_list_instances_async( + transport: str = "grpc_asyncio", request_type=service.ListInstancesRequest ): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), @@ -4732,72 +4803,48 @@ async def test_get_instance_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_instance), "__call__") as call: + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.Instance( - name="name_value", - display_name="display_name_value", - uid="uid_value", - state=resources.Instance.State.READY, - instance_type=resources.Instance.InstanceType.PRIMARY, - availability_type=resources.Instance.AvailabilityType.ZONAL, - gce_zone="gce_zone_value", - ip_address="ip_address_value", - public_ip_address="public_ip_address_value", - reconciling=True, - etag="etag_value", - satisfies_pzs=True, - outbound_public_ip_addresses=["outbound_public_ip_addresses_value"], + service.ListInstancesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) ) - response = await client.get_instance(request) + response = await client.list_instances(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = service.GetInstanceRequest() + request = service.ListInstancesRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.Instance) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.uid == "uid_value" - assert response.state == resources.Instance.State.READY - assert response.instance_type == resources.Instance.InstanceType.PRIMARY - assert response.availability_type == resources.Instance.AvailabilityType.ZONAL - assert response.gce_zone == "gce_zone_value" - assert response.ip_address == "ip_address_value" - assert response.public_ip_address == "public_ip_address_value" - assert response.reconciling is True - assert response.etag == "etag_value" - assert response.satisfies_pzs is True - assert response.outbound_public_ip_addresses == [ - "outbound_public_ip_addresses_value" - ] + assert isinstance(response, pagers.ListInstancesAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] @pytest.mark.asyncio -async def test_get_instance_async_from_dict(): - await test_get_instance_async(request_type=dict) +async def test_list_instances_async_from_dict(): + await test_list_instances_async(request_type=dict) -def test_get_instance_field_headers(): +def test_list_instances_field_headers(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.GetInstanceRequest() + request = service.ListInstancesRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_instance), "__call__") as call: - call.return_value = resources.Instance() - client.get_instance(request) + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: + call.return_value = service.ListInstancesResponse() + client.list_instances(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -4808,26 +4855,28 @@ def test_get_instance_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_get_instance_field_headers_async(): +async def test_list_instances_field_headers_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.GetInstanceRequest() + request = service.ListInstancesRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_instance), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Instance()) - await client.get_instance(request) + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListInstancesResponse() + ) + await client.list_instances(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) @@ -4838,35 +4887,35 @@ async def test_get_instance_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] -def test_get_instance_flattened(): +def test_list_instances_flattened(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_instance), "__call__") as call: + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = resources.Instance() + call.return_value = service.ListInstancesResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_instance( - name="name_value", + client.list_instances( + parent="parent_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" assert arg == mock_val -def test_get_instance_flattened_error(): +def test_list_instances_flattened_error(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -4874,41 +4923,43 @@ def test_get_instance_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_instance( - service.GetInstanceRequest(), - name="name_value", + client.list_instances( + service.ListInstancesRequest(), + parent="parent_value", ) @pytest.mark.asyncio -async def test_get_instance_flattened_async(): +async def test_list_instances_flattened_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_instance), "__call__") as call: + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = resources.Instance() + call.return_value = service.ListInstancesResponse() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Instance()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListInstancesResponse() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_instance( - name="name_value", + response = await client.list_instances( + parent="parent_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" assert arg == mock_val @pytest.mark.asyncio -async def test_get_instance_flattened_error_async(): +async def test_list_instances_flattened_error_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) @@ -4916,20 +4967,214 @@ async def test_get_instance_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.get_instance( - service.GetInstanceRequest(), - name="name_value", - ) - + await client.list_instances( + service.ListInstancesRequest(), + parent="parent_value", + ) + + +def test_list_instances_pager(transport_name: str = "grpc"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListInstancesResponse( + instances=[ + resources.Instance(), + resources.Instance(), + resources.Instance(), + ], + next_page_token="abc", + ), + service.ListInstancesResponse( + instances=[], + next_page_token="def", + ), + service.ListInstancesResponse( + instances=[ + resources.Instance(), + ], + next_page_token="ghi", + ), + service.ListInstancesResponse( + instances=[ + resources.Instance(), + resources.Instance(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_instances(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.Instance) for i in results) + + +def test_list_instances_pages(transport_name: str = "grpc"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + service.ListInstancesResponse( + instances=[ + resources.Instance(), + resources.Instance(), + resources.Instance(), + ], + next_page_token="abc", + ), + service.ListInstancesResponse( + instances=[], + next_page_token="def", + ), + service.ListInstancesResponse( + instances=[ + resources.Instance(), + ], + next_page_token="ghi", + ), + service.ListInstancesResponse( + instances=[ + resources.Instance(), + resources.Instance(), + ], + ), + RuntimeError, + ) + pages = list(client.list_instances(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_instances_async_pager(): + client = AlloyDBAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_instances), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + service.ListInstancesResponse( + instances=[ + resources.Instance(), + resources.Instance(), + resources.Instance(), + ], + next_page_token="abc", + ), + service.ListInstancesResponse( + instances=[], + next_page_token="def", + ), + service.ListInstancesResponse( + instances=[ + resources.Instance(), + ], + next_page_token="ghi", + ), + service.ListInstancesResponse( + instances=[ + resources.Instance(), + resources.Instance(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_instances( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, resources.Instance) for i in responses) + + +@pytest.mark.asyncio +async def test_list_instances_async_pages(): + client = AlloyDBAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_instances), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + service.ListInstancesResponse( + instances=[ + resources.Instance(), + resources.Instance(), + resources.Instance(), + ], + next_page_token="abc", + ), + service.ListInstancesResponse( + instances=[], + next_page_token="def", + ), + service.ListInstancesResponse( + instances=[ + resources.Instance(), + ], + next_page_token="ghi", + ), + service.ListInstancesResponse( + instances=[ + resources.Instance(), + resources.Instance(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_instances(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + @pytest.mark.parametrize( "request_type", [ - service.CreateInstanceRequest, + service.GetInstanceRequest, dict, ], ) -def test_create_instance(request_type, transport: str = "grpc"): +def test_get_instance(request_type, transport: str = "grpc"): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -4940,22 +5185,51 @@ def test_create_instance(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_instance), "__call__") as call: + with mock.patch.object(type(client.transport.get_instance), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.create_instance(request) + call.return_value = resources.Instance( + name="name_value", + display_name="display_name_value", + uid="uid_value", + state=resources.Instance.State.READY, + instance_type=resources.Instance.InstanceType.PRIMARY, + availability_type=resources.Instance.AvailabilityType.ZONAL, + gce_zone="gce_zone_value", + ip_address="ip_address_value", + public_ip_address="public_ip_address_value", + reconciling=True, + etag="etag_value", + satisfies_pzs=True, + outbound_public_ip_addresses=["outbound_public_ip_addresses_value"], + ) + response = client.get_instance(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = service.CreateInstanceRequest() + request = service.GetInstanceRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) + assert isinstance(response, resources.Instance) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.uid == "uid_value" + assert response.state == resources.Instance.State.READY + assert response.instance_type == resources.Instance.InstanceType.PRIMARY + assert response.availability_type == resources.Instance.AvailabilityType.ZONAL + assert response.gce_zone == "gce_zone_value" + assert response.ip_address == "ip_address_value" + assert response.public_ip_address == "public_ip_address_value" + assert response.reconciling is True + assert response.etag == "etag_value" + assert response.satisfies_pzs is True + assert response.outbound_public_ip_addresses == [ + "outbound_public_ip_addresses_value" + ] -def test_create_instance_non_empty_request_with_auto_populated_field(): +def test_get_instance_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 
fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AlloyDBAdminClient( @@ -4966,28 +5240,24 @@ def test_create_instance_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = service.CreateInstanceRequest( - parent="parent_value", - instance_id="instance_id_value", - request_id="request_id_value", + request = service.GetInstanceRequest( + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_instance), "__call__") as call: + with mock.patch.object(type(client.transport.get_instance), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.create_instance(request=request) + client.get_instance(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.CreateInstanceRequest( - parent="parent_value", - instance_id="instance_id_value", - request_id="request_id_value", + assert args[0] == service.GetInstanceRequest( + name="name_value", ) -def test_create_instance_use_cached_wrapped_rpc(): +def test_get_instance_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -5001,26 +5271,21 @@ def test_create_instance_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.create_instance in client._transport._wrapped_methods + assert client._transport.get_instance in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in 
compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.create_instance] = mock_rpc + client._transport._wrapped_methods[client._transport.get_instance] = mock_rpc request = {} - client.create_instance(request) + client.get_instance(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_instance(request) + client.get_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -5028,7 +5293,7 @@ def test_create_instance_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_create_instance_async_use_cached_wrapped_rpc( +async def test_get_instance_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -5045,7 +5310,7 @@ async def test_create_instance_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.create_instance + client._client._transport.get_instance in client._client._transport._wrapped_methods ) @@ -5053,21 +5318,16 @@ async def test_create_instance_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.create_instance + client._client._transport.get_instance ] = mock_rpc request = {} - await client.create_instance(request) + await client.get_instance(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.create_instance(request) + await client.get_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -5075,8 +5335,8 @@ async def test_create_instance_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_create_instance_async( - transport: str = "grpc_asyncio", request_type=service.CreateInstanceRequest +async def test_get_instance_async( + transport: str = "grpc_asyncio", request_type=service.GetInstanceRequest ): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), @@ -5088,43 +5348,72 @@ async def test_create_instance_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_instance), "__call__") as call: + with mock.patch.object(type(client.transport.get_instance), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + resources.Instance( + name="name_value", + display_name="display_name_value", + uid="uid_value", + state=resources.Instance.State.READY, + instance_type=resources.Instance.InstanceType.PRIMARY, + availability_type=resources.Instance.AvailabilityType.ZONAL, + gce_zone="gce_zone_value", + ip_address="ip_address_value", + public_ip_address="public_ip_address_value", + reconciling=True, + etag="etag_value", + satisfies_pzs=True, + outbound_public_ip_addresses=["outbound_public_ip_addresses_value"], + ) ) - response = await client.create_instance(request) + response = await client.get_instance(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = service.CreateInstanceRequest() + request = service.GetInstanceRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) + assert isinstance(response, resources.Instance) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.uid == "uid_value" + assert response.state == resources.Instance.State.READY + assert response.instance_type == resources.Instance.InstanceType.PRIMARY + assert response.availability_type == resources.Instance.AvailabilityType.ZONAL + assert response.gce_zone == "gce_zone_value" + assert response.ip_address == "ip_address_value" + assert response.public_ip_address == "public_ip_address_value" + assert response.reconciling is True + assert response.etag == "etag_value" + assert response.satisfies_pzs is True + assert response.outbound_public_ip_addresses == [ + "outbound_public_ip_addresses_value" + ] @pytest.mark.asyncio -async def test_create_instance_async_from_dict(): - await test_create_instance_async(request_type=dict) +async def test_get_instance_async_from_dict(): + await test_get_instance_async(request_type=dict) -def test_create_instance_field_headers(): +def test_get_instance_field_headers(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.CreateInstanceRequest() + request = service.GetInstanceRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.create_instance), "__call__") as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.create_instance(request) + with mock.patch.object(type(client.transport.get_instance), "__call__") as call: + call.return_value = resources.Instance() + client.get_instance(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -5135,28 +5424,26 @@ def test_create_instance_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_create_instance_field_headers_async(): +async def test_get_instance_field_headers_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.CreateInstanceRequest() + request = service.GetInstanceRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_instance), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/op") - ) - await client.create_instance(request) + with mock.patch.object(type(client.transport.get_instance), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Instance()) + await client.get_instance(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) @@ -5167,43 +5454,35 @@ async def test_create_instance_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] -def test_create_instance_flattened(): +def test_get_instance_flattened(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_instance), "__call__") as call: + with mock.patch.object(type(client.transport.get_instance), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") + call.return_value = resources.Instance() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.create_instance( - parent="parent_value", - instance=resources.Instance(name="name_value"), - instance_id="instance_id_value", + client.get_instance( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].instance - mock_val = resources.Instance(name="name_value") - assert arg == mock_val - arg = args[0].instance_id - mock_val = "instance_id_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val -def test_create_instance_flattened_error(): +def test_get_instance_flattened_error(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -5211,53 +5490,41 @@ def test_create_instance_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.create_instance( - service.CreateInstanceRequest(), - parent="parent_value", - instance=resources.Instance(name="name_value"), - instance_id="instance_id_value", + client.get_instance( + service.GetInstanceRequest(), + name="name_value", ) @pytest.mark.asyncio -async def test_create_instance_flattened_async(): +async def test_get_instance_flattened_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_instance), "__call__") as call: + with mock.patch.object(type(client.transport.get_instance), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") + call.return_value = resources.Instance() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Instance()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.create_instance( - parent="parent_value", - instance=resources.Instance(name="name_value"), - instance_id="instance_id_value", + response = await client.get_instance( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].instance - mock_val = resources.Instance(name="name_value") - assert arg == mock_val - arg = args[0].instance_id - mock_val = "instance_id_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio -async def test_create_instance_flattened_error_async(): +async def test_get_instance_flattened_error_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) @@ -5265,22 +5532,20 @@ async def test_create_instance_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.create_instance( - service.CreateInstanceRequest(), - parent="parent_value", - instance=resources.Instance(name="name_value"), - instance_id="instance_id_value", + await client.get_instance( + service.GetInstanceRequest(), + name="name_value", ) @pytest.mark.parametrize( "request_type", [ - service.CreateSecondaryInstanceRequest, + service.CreateInstanceRequest, dict, ], ) -def test_create_secondary_instance(request_type, transport: str = "grpc"): +def test_create_instance(request_type, transport: str = "grpc"): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5291,24 +5556,22 @@ def test_create_secondary_instance(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_secondary_instance), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_instance), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.create_secondary_instance(request) + response = client.create_instance(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = service.CreateSecondaryInstanceRequest() + request = service.CreateInstanceRequest() assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) -def test_create_secondary_instance_non_empty_request_with_auto_populated_field(): +def test_create_instance_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AlloyDBAdminClient( @@ -5319,30 +5582,28 @@ def test_create_secondary_instance_non_empty_request_with_auto_populated_field() # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = service.CreateSecondaryInstanceRequest( + request = service.CreateInstanceRequest( parent="parent_value", instance_id="instance_id_value", request_id="request_id_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_secondary_instance), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_instance), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.create_secondary_instance(request=request) + client.create_instance(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.CreateSecondaryInstanceRequest( + assert args[0] == service.CreateInstanceRequest( parent="parent_value", instance_id="instance_id_value", request_id="request_id_value", ) -def test_create_secondary_instance_use_cached_wrapped_rpc(): +def test_create_instance_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -5356,21 +5617,16 @@ def test_create_secondary_instance_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.create_secondary_instance - in client._transport._wrapped_methods - ) + assert client._transport.create_instance in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.create_secondary_instance - ] = mock_rpc + client._transport._wrapped_methods[client._transport.create_instance] = mock_rpc request = {} - client.create_secondary_instance(request) + client.create_instance(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 @@ -5380,7 +5636,7 @@ def test_create_secondary_instance_use_cached_wrapped_rpc(): # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.create_secondary_instance(request) + client.create_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -5388,7 +5644,7 @@ def test_create_secondary_instance_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_create_secondary_instance_async_use_cached_wrapped_rpc( +async def test_create_instance_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -5405,7 +5661,7 @@ async def test_create_secondary_instance_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.create_secondary_instance + client._client._transport.create_instance in client._client._transport._wrapped_methods ) @@ -5413,11 +5669,11 @@ async def test_create_secondary_instance_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.create_secondary_instance + client._client._transport.create_instance ] = mock_rpc request = {} - await client.create_secondary_instance(request) + await client.create_instance(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 @@ -5427,7 +5683,7 @@ async def test_create_secondary_instance_async_use_cached_wrapped_rpc( # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - await client.create_secondary_instance(request) + await client.create_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -5435,8 +5691,8 @@ async def test_create_secondary_instance_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_create_secondary_instance_async( - transport: str = "grpc_asyncio", request_type=service.CreateSecondaryInstanceRequest +async def test_create_instance_async( + transport: str = "grpc_asyncio", request_type=service.CreateInstanceRequest ): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), @@ -5448,19 +5704,17 @@ async def test_create_secondary_instance_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_secondary_instance), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_instance), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.create_secondary_instance(request) + response = await client.create_instance(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = service.CreateSecondaryInstanceRequest() + request = service.CreateInstanceRequest() assert args[0] == request # Establish that the response is the type that we expect. 
@@ -5468,27 +5722,25 @@ async def test_create_secondary_instance_async( @pytest.mark.asyncio -async def test_create_secondary_instance_async_from_dict(): - await test_create_secondary_instance_async(request_type=dict) +async def test_create_instance_async_from_dict(): + await test_create_instance_async(request_type=dict) -def test_create_secondary_instance_field_headers(): +def test_create_instance_field_headers(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.CreateSecondaryInstanceRequest() + request = service.CreateInstanceRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_secondary_instance), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_instance), "__call__") as call: call.return_value = operations_pb2.Operation(name="operations/op") - client.create_secondary_instance(request) + client.create_instance(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -5504,25 +5756,23 @@ def test_create_secondary_instance_field_headers(): @pytest.mark.asyncio -async def test_create_secondary_instance_field_headers_async(): +async def test_create_instance_field_headers_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.CreateSecondaryInstanceRequest() + request = service.CreateInstanceRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_secondary_instance), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_instance), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) - await client.create_secondary_instance(request) + await client.create_instance(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -5537,20 +5787,18 @@ async def test_create_secondary_instance_field_headers_async(): ) in kw["metadata"] -def test_create_secondary_instance_flattened(): +def test_create_instance_flattened(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_secondary_instance), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_instance), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.create_secondary_instance( + client.create_instance( parent="parent_value", instance=resources.Instance(name="name_value"), instance_id="instance_id_value", @@ -5571,7 +5819,7 @@ def test_create_secondary_instance_flattened(): assert arg == mock_val -def test_create_secondary_instance_flattened_error(): +def test_create_instance_flattened_error(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -5579,8 +5827,8 @@ def test_create_secondary_instance_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.create_secondary_instance( - service.CreateSecondaryInstanceRequest(), + client.create_instance( + service.CreateInstanceRequest(), parent="parent_value", instance=resources.Instance(name="name_value"), instance_id="instance_id_value", @@ -5588,15 +5836,13 @@ def test_create_secondary_instance_flattened_error(): @pytest.mark.asyncio -async def test_create_secondary_instance_flattened_async(): +async def test_create_instance_flattened_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_secondary_instance), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_instance), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") @@ -5605,7 +5851,7 @@ async def test_create_secondary_instance_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.create_secondary_instance( + response = await client.create_instance( parent="parent_value", instance=resources.Instance(name="name_value"), instance_id="instance_id_value", @@ -5627,7 +5873,7 @@ async def test_create_secondary_instance_flattened_async(): @pytest.mark.asyncio -async def test_create_secondary_instance_flattened_error_async(): +async def test_create_instance_flattened_error_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) @@ -5635,8 +5881,8 @@ async def test_create_secondary_instance_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.create_secondary_instance( - service.CreateSecondaryInstanceRequest(), + await client.create_instance( + service.CreateInstanceRequest(), parent="parent_value", instance=resources.Instance(name="name_value"), instance_id="instance_id_value", @@ -5646,11 +5892,11 @@ async def test_create_secondary_instance_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - service.BatchCreateInstancesRequest, + service.CreateSecondaryInstanceRequest, dict, ], ) -def test_batch_create_instances(request_type, transport: str = "grpc"): +def test_create_secondary_instance(request_type, transport: str = "grpc"): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5662,23 +5908,23 @@ def test_batch_create_instances(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.batch_create_instances), "__call__" + type(client.transport.create_secondary_instance), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.batch_create_instances(request) + response = client.create_secondary_instance(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = service.BatchCreateInstancesRequest() + request = service.CreateSecondaryInstanceRequest() assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) -def test_batch_create_instances_non_empty_request_with_auto_populated_field(): +def test_create_secondary_instance_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. 
client = AlloyDBAdminClient( @@ -5689,28 +5935,30 @@ def test_batch_create_instances_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = service.BatchCreateInstancesRequest( + request = service.CreateSecondaryInstanceRequest( parent="parent_value", + instance_id="instance_id_value", request_id="request_id_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.batch_create_instances), "__call__" + type(client.transport.create_secondary_instance), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.batch_create_instances(request=request) + client.create_secondary_instance(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.BatchCreateInstancesRequest( + assert args[0] == service.CreateSecondaryInstanceRequest( parent="parent_value", + instance_id="instance_id_value", request_id="request_id_value", ) -def test_batch_create_instances_use_cached_wrapped_rpc(): +def test_create_secondary_instance_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -5725,7 +5973,7 @@ def test_batch_create_instances_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.batch_create_instances + client._transport.create_secondary_instance in client._transport._wrapped_methods ) @@ -5735,10 +5983,10 @@ def test_batch_create_instances_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. 
) client._transport._wrapped_methods[ - client._transport.batch_create_instances + client._transport.create_secondary_instance ] = mock_rpc request = {} - client.batch_create_instances(request) + client.create_secondary_instance(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 @@ -5748,7 +5996,7 @@ def test_batch_create_instances_use_cached_wrapped_rpc(): # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.batch_create_instances(request) + client.create_secondary_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -5756,7 +6004,7 @@ def test_batch_create_instances_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_batch_create_instances_async_use_cached_wrapped_rpc( +async def test_create_secondary_instance_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -5773,7 +6021,7 @@ async def test_batch_create_instances_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.batch_create_instances + client._client._transport.create_secondary_instance in client._client._transport._wrapped_methods ) @@ -5781,11 +6029,11 @@ async def test_batch_create_instances_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.batch_create_instances + client._client._transport.create_secondary_instance ] = mock_rpc request = {} - await client.batch_create_instances(request) + await client.create_secondary_instance(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 @@ -5795,7 +6043,7 @@ async def test_batch_create_instances_async_use_cached_wrapped_rpc( # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - await client.batch_create_instances(request) + await client.create_secondary_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -5803,8 +6051,8 @@ async def test_batch_create_instances_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_batch_create_instances_async( - transport: str = "grpc_asyncio", request_type=service.BatchCreateInstancesRequest +async def test_create_secondary_instance_async( + transport: str = "grpc_asyncio", request_type=service.CreateSecondaryInstanceRequest ): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), @@ -5817,18 +6065,18 @@ async def test_batch_create_instances_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.batch_create_instances), "__call__" + type(client.transport.create_secondary_instance), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.batch_create_instances(request) + response = await client.create_secondary_instance(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = service.BatchCreateInstancesRequest() + request = service.CreateSecondaryInstanceRequest() assert args[0] == request # Establish that the response is the type that we expect. 
@@ -5836,27 +6084,27 @@ async def test_batch_create_instances_async( @pytest.mark.asyncio -async def test_batch_create_instances_async_from_dict(): - await test_batch_create_instances_async(request_type=dict) +async def test_create_secondary_instance_async_from_dict(): + await test_create_secondary_instance_async(request_type=dict) -def test_batch_create_instances_field_headers(): +def test_create_secondary_instance_field_headers(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.BatchCreateInstancesRequest() + request = service.CreateSecondaryInstanceRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.batch_create_instances), "__call__" + type(client.transport.create_secondary_instance), "__call__" ) as call: call.return_value = operations_pb2.Operation(name="operations/op") - client.batch_create_instances(request) + client.create_secondary_instance(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -5872,25 +6120,25 @@ def test_batch_create_instances_field_headers(): @pytest.mark.asyncio -async def test_batch_create_instances_field_headers_async(): +async def test_create_secondary_instance_field_headers_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.BatchCreateInstancesRequest() + request = service.CreateSecondaryInstanceRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.batch_create_instances), "__call__" + type(client.transport.create_secondary_instance), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) - await client.batch_create_instances(request) + await client.create_secondary_instance(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -5905,14 +6153,120 @@ async def test_batch_create_instances_field_headers_async(): ) in kw["metadata"] +def test_create_secondary_instance_flattened(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_secondary_instance), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_secondary_instance( + parent="parent_value", + instance=resources.Instance(name="name_value"), + instance_id="instance_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].instance + mock_val = resources.Instance(name="name_value") + assert arg == mock_val + arg = args[0].instance_id + mock_val = "instance_id_value" + assert arg == mock_val + + +def test_create_secondary_instance_flattened_error(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_secondary_instance( + service.CreateSecondaryInstanceRequest(), + parent="parent_value", + instance=resources.Instance(name="name_value"), + instance_id="instance_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_secondary_instance_flattened_async(): + client = AlloyDBAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_secondary_instance), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_secondary_instance( + parent="parent_value", + instance=resources.Instance(name="name_value"), + instance_id="instance_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].instance + mock_val = resources.Instance(name="name_value") + assert arg == mock_val + arg = args[0].instance_id + mock_val = "instance_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_secondary_instance_flattened_error_async(): + client = AlloyDBAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.create_secondary_instance( + service.CreateSecondaryInstanceRequest(), + parent="parent_value", + instance=resources.Instance(name="name_value"), + instance_id="instance_id_value", + ) + + @pytest.mark.parametrize( "request_type", [ - service.UpdateInstanceRequest, + service.BatchCreateInstancesRequest, dict, ], ) -def test_update_instance(request_type, transport: str = "grpc"): +def test_batch_create_instances(request_type, transport: str = "grpc"): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5923,22 +6277,24 @@ def test_update_instance(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_instance), "__call__") as call: + with mock.patch.object( + type(client.transport.batch_create_instances), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.update_instance(request) + response = client.batch_create_instances(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = service.UpdateInstanceRequest() + request = service.BatchCreateInstancesRequest() assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) -def test_update_instance_non_empty_request_with_auto_populated_field(): +def test_batch_create_instances_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. 
client = AlloyDBAdminClient( @@ -5949,24 +6305,28 @@ def test_update_instance_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = service.UpdateInstanceRequest( + request = service.BatchCreateInstancesRequest( + parent="parent_value", request_id="request_id_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_instance), "__call__") as call: + with mock.patch.object( + type(client.transport.batch_create_instances), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.update_instance(request=request) + client.batch_create_instances(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.UpdateInstanceRequest( + assert args[0] == service.BatchCreateInstancesRequest( + parent="parent_value", request_id="request_id_value", ) -def test_update_instance_use_cached_wrapped_rpc(): +def test_batch_create_instances_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -5980,16 +6340,21 @@ def test_update_instance_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.update_instance in client._transport._wrapped_methods + assert ( + client._transport.batch_create_instances + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.update_instance] = mock_rpc + client._transport._wrapped_methods[ + client._transport.batch_create_instances + ] = mock_rpc request = {} - client.update_instance(request) + client.batch_create_instances(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 @@ -5999,7 +6364,7 @@ def test_update_instance_use_cached_wrapped_rpc(): # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.update_instance(request) + client.batch_create_instances(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -6007,7 +6372,258 @@ def test_update_instance_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_update_instance_async_use_cached_wrapped_rpc( +async def test_batch_create_instances_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = AlloyDBAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.batch_create_instances + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.batch_create_instances + ] = mock_rpc + + request = {} + await client.batch_create_instances(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.batch_create_instances(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_batch_create_instances_async( + transport: str = "grpc_asyncio", request_type=service.BatchCreateInstancesRequest +): + client = AlloyDBAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_create_instances), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.batch_create_instances(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = service.BatchCreateInstancesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_batch_create_instances_async_from_dict(): + await test_batch_create_instances_async(request_type=dict) + + +def test_batch_create_instances_field_headers(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = service.BatchCreateInstancesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_create_instances), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.batch_create_instances(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_batch_create_instances_field_headers_async(): + client = AlloyDBAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.BatchCreateInstancesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_create_instances), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.batch_create_instances(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + service.UpdateInstanceRequest, + dict, + ], +) +def test_update_instance(request_type, transport: str = "grpc"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.update_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = service.UpdateInstanceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_instance_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service.UpdateInstanceRequest( + request_id="request_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.update_instance), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.update_instance(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.UpdateInstanceRequest( + request_id="request_id_value", + ) + + +def test_update_instance_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_instance in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_instance] = mock_rpc + request = {} + client.update_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_instance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_instance_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -7591,11 +8207,11 @@ async def test_restart_instance_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - service.ListBackupsRequest, + service.ExecuteSqlRequest, dict, ], ) -def test_list_backups(request_type, transport: str = "grpc"): +def test_execute_sql(request_type, transport: str = "grpc"): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -7606,27 +8222,22 @@ def test_list_backups(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_backups), "__call__") as call: + with mock.patch.object(type(client.transport.execute_sql), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = service.ListBackupsResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], - ) - response = client.list_backups(request) + call.return_value = service.ExecuteSqlResponse() + response = client.execute_sql(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = service.ListBackupsRequest() + request = service.ExecuteSqlRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListBackupsPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert isinstance(response, service.ExecuteSqlResponse) -def test_list_backups_non_empty_request_with_auto_populated_field(): +def test_execute_sql_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AlloyDBAdminClient( @@ -7637,30 +8248,32 @@ def test_list_backups_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = service.ListBackupsRequest( - parent="parent_value", - page_token="page_token_value", - filter="filter_value", - order_by="order_by_value", + request = service.ExecuteSqlRequest( + password="password_value", + instance="instance_value", + database="database_value", + user="user_value", + sql_statement="sql_statement_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_backups), "__call__") as call: + with mock.patch.object(type(client.transport.execute_sql), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.list_backups(request=request) + client.execute_sql(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.ListBackupsRequest( - parent="parent_value", - page_token="page_token_value", - filter="filter_value", - order_by="order_by_value", + assert args[0] == service.ExecuteSqlRequest( + password="password_value", + instance="instance_value", + database="database_value", + user="user_value", + sql_statement="sql_statement_value", ) -def test_list_backups_use_cached_wrapped_rpc(): +def test_execute_sql_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -7674,21 +8287,21 @@ def test_list_backups_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_backups in client._transport._wrapped_methods + assert client._transport.execute_sql in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.list_backups] = mock_rpc + client._transport._wrapped_methods[client._transport.execute_sql] = mock_rpc request = {} - client.list_backups(request) + client.execute_sql(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_backups(request) + client.execute_sql(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -7696,7 +8309,7 @@ def test_list_backups_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_list_backups_async_use_cached_wrapped_rpc( +async def test_execute_sql_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -7713,7 +8326,7 @@ async def test_list_backups_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.list_backups + client._client._transport.execute_sql in client._client._transport._wrapped_methods ) @@ -7721,16 +8334,16 @@ async def test_list_backups_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.list_backups + client._client._transport.execute_sql ] = mock_rpc request = {} - await client.list_backups(request) + await client.execute_sql(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - await client.list_backups(request) + await client.execute_sql(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -7738,8 +8351,8 @@ async def test_list_backups_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_list_backups_async( - transport: str = "grpc_asyncio", request_type=service.ListBackupsRequest +async def test_execute_sql_async( + transport: str = "grpc_asyncio", request_type=service.ExecuteSqlRequest ): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), @@ -7751,48 +8364,43 @@ async def test_list_backups_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_backups), "__call__") as call: + with mock.patch.object(type(client.transport.execute_sql), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - service.ListBackupsResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], - ) + service.ExecuteSqlResponse() ) - response = await client.list_backups(request) + response = await client.execute_sql(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = service.ListBackupsRequest() + request = service.ExecuteSqlRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListBackupsAsyncPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert isinstance(response, service.ExecuteSqlResponse) @pytest.mark.asyncio -async def test_list_backups_async_from_dict(): - await test_list_backups_async(request_type=dict) +async def test_execute_sql_async_from_dict(): + await test_execute_sql_async(request_type=dict) -def test_list_backups_field_headers(): +def test_execute_sql_field_headers(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.ListBackupsRequest() + request = service.ExecuteSqlRequest() - request.parent = "parent_value" + request.instance = "instance_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_backups), "__call__") as call: - call.return_value = service.ListBackupsResponse() - client.list_backups(request) + with mock.patch.object(type(client.transport.execute_sql), "__call__") as call: + call.return_value = service.ExecuteSqlResponse() + client.execute_sql(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -7803,28 +8411,28 @@ def test_list_backups_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "instance=instance_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_list_backups_field_headers_async(): +async def test_execute_sql_field_headers_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.ListBackupsRequest() + request = service.ExecuteSqlRequest() - request.parent = "parent_value" + request.instance = "instance_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_backups), "__call__") as call: + with mock.patch.object(type(client.transport.execute_sql), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - service.ListBackupsResponse() + service.ExecuteSqlResponse() ) - await client.list_backups(request) + await client.execute_sql(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) @@ -7835,35 +8443,49 @@ async def test_list_backups_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "instance=instance_value", ) in kw["metadata"] -def test_list_backups_flattened(): +def test_execute_sql_flattened(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_backups), "__call__") as call: + with mock.patch.object(type(client.transport.execute_sql), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = service.ListBackupsResponse() + call.return_value = service.ExecuteSqlResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_backups( - parent="parent_value", + client.execute_sql( + instance="instance_value", + database="database_value", + user="user_value", + sql_statement="sql_statement_value", + password="password_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].instance + mock_val = "instance_value" + assert arg == mock_val + arg = args[0].database + mock_val = "database_value" + assert arg == mock_val + arg = args[0].user + mock_val = "user_value" assert arg == mock_val + arg = args[0].sql_statement + mock_val = "sql_statement_value" + assert arg == mock_val + assert args[0].password == "password_value" -def test_list_backups_flattened_error(): +def test_execute_sql_flattened_error(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -7871,43 +8493,61 @@ def test_list_backups_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_backups( - service.ListBackupsRequest(), - parent="parent_value", + client.execute_sql( + service.ExecuteSqlRequest(), + instance="instance_value", + database="database_value", + user="user_value", + sql_statement="sql_statement_value", + password="password_value", ) @pytest.mark.asyncio -async def test_list_backups_flattened_async(): +async def test_execute_sql_flattened_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_backups), "__call__") as call: + with mock.patch.object(type(client.transport.execute_sql), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = service.ListBackupsResponse() + call.return_value = service.ExecuteSqlResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - service.ListBackupsResponse() + service.ExecuteSqlResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.list_backups( - parent="parent_value", + response = await client.execute_sql( + instance="instance_value", + database="database_value", + user="user_value", + sql_statement="sql_statement_value", + password="password_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].instance + mock_val = "instance_value" assert arg == mock_val + arg = args[0].database + mock_val = "database_value" + assert arg == mock_val + arg = args[0].user + mock_val = "user_value" + assert arg == mock_val + arg = args[0].sql_statement + mock_val = "sql_statement_value" + assert arg == mock_val + assert args[0].password == "password_value" @pytest.mark.asyncio -async def test_list_backups_flattened_error_async(): +async def test_execute_sql_flattened_error_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) @@ -7915,323 +8555,117 @@ async def test_list_backups_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.list_backups( - service.ListBackupsRequest(), - parent="parent_value", + await client.execute_sql( + service.ExecuteSqlRequest(), + instance="instance_value", + database="database_value", + user="user_value", + sql_statement="sql_statement_value", + password="password_value", ) -def test_list_backups_pager(transport_name: str = "grpc"): +@pytest.mark.parametrize( + "request_type", + [ + service.ListBackupsRequest, + dict, + ], +) +def test_list_backups(request_type, transport: str = "grpc"): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, + transport=transport, ) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_backups), "__call__") as call: - # Set the response to a series of pages. - call.side_effect = ( - service.ListBackupsResponse( - backups=[ - resources.Backup(), - resources.Backup(), - resources.Backup(), - ], - next_page_token="abc", - ), - service.ListBackupsResponse( - backups=[], - next_page_token="def", - ), - service.ListBackupsResponse( - backups=[ - resources.Backup(), - ], - next_page_token="ghi", - ), - service.ListBackupsResponse( - backups=[ - resources.Backup(), - resources.Backup(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + # Designate an appropriate return value for the call. 
+ call.return_value = service.ListBackupsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) - pager = client.list_backups(request={}, retry=retry, timeout=timeout) + response = client.list_backups(request) - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = service.ListBackupsRequest() + assert args[0] == request - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, resources.Backup) for i in results) + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListBackupsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] -def test_list_backups_pages(transport_name: str = "grpc"): +def test_list_backups_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service.ListBackupsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_backups), "__call__") as call: - # Set the response to a series of pages. 
- call.side_effect = ( - service.ListBackupsResponse( - backups=[ - resources.Backup(), - resources.Backup(), - resources.Backup(), - ], - next_page_token="abc", - ), - service.ListBackupsResponse( - backups=[], - next_page_token="def", - ), - service.ListBackupsResponse( - backups=[ - resources.Backup(), - ], - next_page_token="ghi", - ), - service.ListBackupsResponse( - backups=[ - resources.Backup(), - resources.Backup(), - ], - ), - RuntimeError, + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_backups(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListBackupsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", ) - pages = list(client.list_backups(request={}).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token -@pytest.mark.asyncio -async def test_list_backups_async_pager(): - client = AlloyDBAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backups), "__call__", new_callable=mock.AsyncMock - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - service.ListBackupsResponse( - backups=[ - resources.Backup(), - resources.Backup(), - resources.Backup(), - ], - next_page_token="abc", - ), - service.ListBackupsResponse( - backups=[], - next_page_token="def", - ), - service.ListBackupsResponse( - backups=[ - resources.Backup(), - ], - next_page_token="ghi", - ), - service.ListBackupsResponse( - backups=[ - resources.Backup(), - resources.Backup(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_backups( - request={}, - ) - assert async_pager.next_page_token == "abc" - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, resources.Backup) for i in responses) - - -@pytest.mark.asyncio -async def test_list_backups_async_pages(): - client = AlloyDBAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backups), "__call__", new_callable=mock.AsyncMock - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - service.ListBackupsResponse( - backups=[ - resources.Backup(), - resources.Backup(), - resources.Backup(), - ], - next_page_token="abc", - ), - service.ListBackupsResponse( - backups=[], - next_page_token="def", - ), - service.ListBackupsResponse( - backups=[ - resources.Backup(), - ], - next_page_token="ghi", - ), - service.ListBackupsResponse( - backups=[ - resources.Backup(), - resources.Backup(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_backups(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize( - "request_type", - [ - service.GetBackupRequest, - dict, - ], -) -def test_get_backup(request_type, transport: str = "grpc"): - client = AlloyDBAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_backup), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = resources.Backup( - name="name_value", - display_name="display_name_value", - uid="uid_value", - state=resources.Backup.State.READY, - type_=resources.Backup.Type.ON_DEMAND, - description="description_value", - cluster_uid="cluster_uid_value", - cluster_name="cluster_name_value", - reconciling=True, - etag="etag_value", - size_bytes=1089, - satisfies_pzs=True, - database_version=resources.DatabaseVersion.POSTGRES_13, - ) - response = client.get_backup(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = service.GetBackupRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, resources.Backup) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.uid == "uid_value" - assert response.state == resources.Backup.State.READY - assert response.type_ == resources.Backup.Type.ON_DEMAND - assert response.description == "description_value" - assert response.cluster_uid == "cluster_uid_value" - assert response.cluster_name == "cluster_name_value" - assert response.reconciling is True - assert response.etag == "etag_value" - assert response.size_bytes == 1089 - assert response.satisfies_pzs is True - assert response.database_version == resources.DatabaseVersion.POSTGRES_13 - - -def test_get_backup_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AlloyDBAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. 
- request = service.GetBackupRequest( - name="name_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_backup), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.get_backup(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.GetBackupRequest( - name="name_value", - ) - - -def test_get_backup_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AlloyDBAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) +def test_list_backups_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) # Should wrap all calls on client creation assert wrapper_fn.call_count > 0 wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_backup in client._transport._wrapped_methods + assert client._transport.list_backups in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_backup] = mock_rpc + client._transport._wrapped_methods[client._transport.list_backups] = mock_rpc request = {} - client.get_backup(request) + client.list_backups(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_backup(request) + client.list_backups(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -8239,7 +8673,9 @@ def test_get_backup_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_get_backup_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_list_backups_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -8254,7 +8690,7 @@ async def test_get_backup_async_use_cached_wrapped_rpc(transport: str = "grpc_as # Ensure method has been cached assert ( - client._client._transport.get_backup + client._client._transport.list_backups in client._client._transport._wrapped_methods ) @@ -8262,16 +8698,16 @@ async def test_get_backup_async_use_cached_wrapped_rpc(transport: str = "grpc_as mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.get_backup + client._client._transport.list_backups ] = mock_rpc request = {} - await client.get_backup(request) + await client.list_backups(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - await client.get_backup(request) + await client.list_backups(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -8279,8 +8715,8 @@ async def test_get_backup_async_use_cached_wrapped_rpc(transport: str = "grpc_as @pytest.mark.asyncio -async def test_get_backup_async( - transport: str = "grpc_asyncio", request_type=service.GetBackupRequest +async def test_list_backups_async( + transport: str = "grpc_asyncio", request_type=service.ListBackupsRequest ): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), @@ -8292,70 +8728,48 @@ async def test_get_backup_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_backup), "__call__") as call: + with mock.patch.object(type(client.transport.list_backups), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.Backup( - name="name_value", - display_name="display_name_value", - uid="uid_value", - state=resources.Backup.State.READY, - type_=resources.Backup.Type.ON_DEMAND, - description="description_value", - cluster_uid="cluster_uid_value", - cluster_name="cluster_name_value", - reconciling=True, - etag="etag_value", - size_bytes=1089, - satisfies_pzs=True, - database_version=resources.DatabaseVersion.POSTGRES_13, + service.ListBackupsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) ) - response = await client.get_backup(request) + response = await client.list_backups(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = service.GetBackupRequest() + request = service.ListBackupsRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.Backup) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.uid == "uid_value" - assert response.state == resources.Backup.State.READY - assert response.type_ == resources.Backup.Type.ON_DEMAND - assert response.description == "description_value" - assert response.cluster_uid == "cluster_uid_value" - assert response.cluster_name == "cluster_name_value" - assert response.reconciling is True - assert response.etag == "etag_value" - assert response.size_bytes == 1089 - assert response.satisfies_pzs is True - assert response.database_version == resources.DatabaseVersion.POSTGRES_13 + assert isinstance(response, pagers.ListBackupsAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] @pytest.mark.asyncio -async def test_get_backup_async_from_dict(): - await test_get_backup_async(request_type=dict) +async def test_list_backups_async_from_dict(): + await test_list_backups_async(request_type=dict) -def test_get_backup_field_headers(): +def test_list_backups_field_headers(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.GetBackupRequest() + request = service.ListBackupsRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_backup), "__call__") as call: - call.return_value = resources.Backup() - client.get_backup(request) + with mock.patch.object(type(client.transport.list_backups), "__call__") as call: + call.return_value = service.ListBackupsResponse() + client.list_backups(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -8366,26 +8780,28 @@ def test_get_backup_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_get_backup_field_headers_async(): +async def test_list_backups_field_headers_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.GetBackupRequest() + request = service.ListBackupsRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_backup), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Backup()) - await client.get_backup(request) + with mock.patch.object(type(client.transport.list_backups), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListBackupsResponse() + ) + await client.list_backups(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -8396,35 +8812,35 @@ async def test_get_backup_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] -def test_get_backup_flattened(): +def test_list_backups_flattened(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_backup), "__call__") as call: + with mock.patch.object(type(client.transport.list_backups), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = resources.Backup() + call.return_value = service.ListBackupsResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_backup( - name="name_value", + client.list_backups( + parent="parent_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" assert arg == mock_val -def test_get_backup_flattened_error(): +def test_list_backups_flattened_error(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -8432,41 +8848,43 @@ def test_get_backup_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_backup( - service.GetBackupRequest(), - name="name_value", + client.list_backups( + service.ListBackupsRequest(), + parent="parent_value", ) @pytest.mark.asyncio -async def test_get_backup_flattened_async(): +async def test_list_backups_flattened_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_backup), "__call__") as call: + with mock.patch.object(type(client.transport.list_backups), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = resources.Backup() + call.return_value = service.ListBackupsResponse() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Backup()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListBackupsResponse() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.get_backup( - name="name_value", + response = await client.list_backups( + parent="parent_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" assert arg == mock_val @pytest.mark.asyncio -async def test_get_backup_flattened_error_async(): +async def test_list_backups_flattened_error_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) @@ -8474,111 +8892,323 @@ async def test_get_backup_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.get_backup( - service.GetBackupRequest(), - name="name_value", + await client.list_backups( + service.ListBackupsRequest(), + parent="parent_value", ) -@pytest.mark.parametrize( - "request_type", - [ - service.CreateBackupRequest, - dict, - ], -) -def test_create_backup(request_type, transport: str = "grpc"): +def test_list_backups_pager(transport_name: str = "grpc"): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport=transport_name, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_backup), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.create_backup(request) + with mock.patch.object(type(client.transport.list_backups), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + service.ListBackupsResponse( + backups=[ + resources.Backup(), + resources.Backup(), + resources.Backup(), + ], + next_page_token="abc", + ), + service.ListBackupsResponse( + backups=[], + next_page_token="def", + ), + service.ListBackupsResponse( + backups=[ + resources.Backup(), + ], + next_page_token="ghi", + ), + service.ListBackupsResponse( + backups=[ + resources.Backup(), + resources.Backup(), + ], + ), + RuntimeError, + ) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = service.CreateBackupRequest() - assert args[0] == request + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_backups(request={}, retry=retry, timeout=timeout) - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.Backup) for i in results) -def test_create_backup_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. +def test_list_backups_pages(transport_name: str = "grpc"): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport=transport_name, ) - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. 
- request = service.CreateBackupRequest( - parent="parent_value", - backup_id="backup_id_value", - request_id="request_id_value", + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_backups), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListBackupsResponse( + backups=[ + resources.Backup(), + resources.Backup(), + resources.Backup(), + ], + next_page_token="abc", + ), + service.ListBackupsResponse( + backups=[], + next_page_token="def", + ), + service.ListBackupsResponse( + backups=[ + resources.Backup(), + ], + next_page_token="ghi", + ), + service.ListBackupsResponse( + backups=[ + resources.Backup(), + resources.Backup(), + ], + ), + RuntimeError, + ) + pages = list(client.list_backups(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_backups_async_pager(): + client = AlloyDBAdminAsyncClient( + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_backup), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. + with mock.patch.object( + type(client.transport.list_backups), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + service.ListBackupsResponse( + backups=[ + resources.Backup(), + resources.Backup(), + resources.Backup(), + ], + next_page_token="abc", + ), + service.ListBackupsResponse( + backups=[], + next_page_token="def", + ), + service.ListBackupsResponse( + backups=[ + resources.Backup(), + ], + next_page_token="ghi", + ), + service.ListBackupsResponse( + backups=[ + resources.Backup(), + resources.Backup(), + ], + ), + RuntimeError, ) - client.create_backup(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.CreateBackupRequest( - parent="parent_value", - backup_id="backup_id_value", - request_id="request_id_value", + async_pager = await client.list_backups( + request={}, ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + assert len(responses) == 6 + assert all(isinstance(i, resources.Backup) for i in responses) -def test_create_backup_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AlloyDBAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - # Ensure method has been cached - assert client._transport.create_backup in client._transport._wrapped_methods +@pytest.mark.asyncio +async def test_list_backups_async_pages(): + client = AlloyDBAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client._transport._wrapped_methods[client._transport.create_backup] = mock_rpc - request = {} - client.create_backup(request) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backups), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListBackupsResponse( + backups=[ + resources.Backup(), + resources.Backup(), + resources.Backup(), + ], + next_page_token="abc", + ), + service.ListBackupsResponse( + backups=[], + next_page_token="def", + ), + service.ListBackupsResponse( + backups=[ + resources.Backup(), + ], + next_page_token="ghi", + ), + service.ListBackupsResponse( + backups=[ + resources.Backup(), + resources.Backup(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_backups(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + service.GetBackupRequest, + dict, + ], +) +def test_get_backup(request_type, transport: str = "grpc"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = resources.Backup( + name="name_value", + display_name="display_name_value", + uid="uid_value", + state=resources.Backup.State.READY, + type_=resources.Backup.Type.ON_DEMAND, + description="description_value", + cluster_uid="cluster_uid_value", + cluster_name="cluster_name_value", + reconciling=True, + etag="etag_value", + size_bytes=1089, + satisfies_pzs=True, + database_version=resources.DatabaseVersion.POSTGRES_13, + ) + response = client.get_backup(request) # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = service.GetBackupRequest() + assert args[0] == request - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper + # Establish that the response is the type that we expect. + assert isinstance(response, resources.Backup) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.uid == "uid_value" + assert response.state == resources.Backup.State.READY + assert response.type_ == resources.Backup.Type.ON_DEMAND + assert response.description == "description_value" + assert response.cluster_uid == "cluster_uid_value" + assert response.cluster_name == "cluster_name_value" + assert response.reconciling is True + assert response.etag == "etag_value" + assert response.size_bytes == 1089 + assert response.satisfies_pzs is True + assert response.database_version == resources.DatabaseVersion.POSTGRES_13 + + +def test_get_backup_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service.GetBackupRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_backup(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetBackupRequest( + name="name_value", + ) + + +def test_get_backup_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 wrapper_fn.reset_mock() - client.create_backup(request) + # Ensure method has been cached + assert client._transport.get_backup in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_backup] = mock_rpc + request = {} + client.get_backup(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_backup(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -8586,9 +9216,7 @@ def test_create_backup_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_create_backup_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): +async def test_get_backup_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -8603,7 +9231,7 @@ async def test_create_backup_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.create_backup + client._client._transport.get_backup in client._client._transport._wrapped_methods ) @@ -8611,21 +9239,16 @@ async def test_create_backup_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.create_backup + client._client._transport.get_backup ] = mock_rpc request = {} - await client.create_backup(request) + await client.get_backup(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.create_backup(request) + await client.get_backup(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -8633,8 +9256,8 @@ async def test_create_backup_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_create_backup_async( - transport: str = "grpc_asyncio", request_type=service.CreateBackupRequest +async def test_get_backup_async( + transport: str = "grpc_asyncio", request_type=service.GetBackupRequest ): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), @@ -8646,43 +9269,70 @@ async def test_create_backup_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_backup), "__call__") as call: + with mock.patch.object(type(client.transport.get_backup), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + resources.Backup( + name="name_value", + display_name="display_name_value", + uid="uid_value", + state=resources.Backup.State.READY, + type_=resources.Backup.Type.ON_DEMAND, + description="description_value", + cluster_uid="cluster_uid_value", + cluster_name="cluster_name_value", + reconciling=True, + etag="etag_value", + size_bytes=1089, + satisfies_pzs=True, + database_version=resources.DatabaseVersion.POSTGRES_13, + ) ) - response = await client.create_backup(request) + response = await client.get_backup(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = service.CreateBackupRequest() + request = service.GetBackupRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, future.Future) + assert isinstance(response, resources.Backup) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.uid == "uid_value" + assert response.state == resources.Backup.State.READY + assert response.type_ == resources.Backup.Type.ON_DEMAND + assert response.description == "description_value" + assert response.cluster_uid == "cluster_uid_value" + assert response.cluster_name == "cluster_name_value" + assert response.reconciling is True + assert response.etag == "etag_value" + assert response.size_bytes == 1089 + assert response.satisfies_pzs is True + assert response.database_version == resources.DatabaseVersion.POSTGRES_13 @pytest.mark.asyncio -async def test_create_backup_async_from_dict(): - await test_create_backup_async(request_type=dict) +async def test_get_backup_async_from_dict(): + await test_get_backup_async(request_type=dict) -def test_create_backup_field_headers(): +def test_get_backup_field_headers(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.CreateBackupRequest() + request = service.GetBackupRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_backup), "__call__") as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.create_backup(request) + with mock.patch.object(type(client.transport.get_backup), "__call__") as call: + call.return_value = resources.Backup() + client.get_backup(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -8693,28 +9343,26 @@ def test_create_backup_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_create_backup_field_headers_async(): +async def test_get_backup_field_headers_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.CreateBackupRequest() + request = service.GetBackupRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_backup), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/op") - ) - await client.create_backup(request) + with mock.patch.object(type(client.transport.get_backup), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Backup()) + await client.get_backup(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -8725,43 +9373,35 @@ async def test_create_backup_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] -def test_create_backup_flattened(): +def test_get_backup_flattened(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_backup), "__call__") as call: + with mock.patch.object(type(client.transport.get_backup), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = operations_pb2.Operation(name="operations/op") + call.return_value = resources.Backup() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.create_backup( - parent="parent_value", - backup=resources.Backup(name="name_value"), - backup_id="backup_id_value", + client.get_backup( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].backup - mock_val = resources.Backup(name="name_value") - assert arg == mock_val - arg = args[0].backup_id - mock_val = "backup_id_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val -def test_create_backup_flattened_error(): +def test_get_backup_flattened_error(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -8769,53 +9409,41 @@ def test_create_backup_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_backup( - service.CreateBackupRequest(), - parent="parent_value", - backup=resources.Backup(name="name_value"), - backup_id="backup_id_value", + client.get_backup( + service.GetBackupRequest(), + name="name_value", ) @pytest.mark.asyncio -async def test_create_backup_flattened_async(): +async def test_get_backup_flattened_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_backup), "__call__") as call: + with mock.patch.object(type(client.transport.get_backup), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = operations_pb2.Operation(name="operations/op") + call.return_value = resources.Backup() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Backup()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.create_backup( - parent="parent_value", - backup=resources.Backup(name="name_value"), - backup_id="backup_id_value", + response = await client.get_backup( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].backup - mock_val = resources.Backup(name="name_value") - assert arg == mock_val - arg = args[0].backup_id - mock_val = "backup_id_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio -async def test_create_backup_flattened_error_async(): +async def test_get_backup_flattened_error_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) @@ -8823,22 +9451,20 @@ async def test_create_backup_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.create_backup( - service.CreateBackupRequest(), - parent="parent_value", - backup=resources.Backup(name="name_value"), - backup_id="backup_id_value", + await client.get_backup( + service.GetBackupRequest(), + name="name_value", ) @pytest.mark.parametrize( "request_type", [ - service.UpdateBackupRequest, + service.CreateBackupRequest, dict, ], ) -def test_update_backup(request_type, transport: str = "grpc"): +def test_create_backup(request_type, transport: str = "grpc"): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -8849,22 +9475,22 @@ def test_update_backup(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_backup), "__call__") as call: + with mock.patch.object(type(client.transport.create_backup), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.update_backup(request) + response = client.create_backup(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = service.UpdateBackupRequest() + request = service.CreateBackupRequest() assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) -def test_update_backup_non_empty_request_with_auto_populated_field(): +def test_create_backup_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. 
client = AlloyDBAdminClient( @@ -8875,24 +9501,28 @@ def test_update_backup_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = service.UpdateBackupRequest( + request = service.CreateBackupRequest( + parent="parent_value", + backup_id="backup_id_value", request_id="request_id_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_backup), "__call__") as call: + with mock.patch.object(type(client.transport.create_backup), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.update_backup(request=request) + client.create_backup(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.UpdateBackupRequest( + assert args[0] == service.CreateBackupRequest( + parent="parent_value", + backup_id="backup_id_value", request_id="request_id_value", ) -def test_update_backup_use_cached_wrapped_rpc(): +def test_create_backup_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -8906,16 +9536,16 @@ def test_update_backup_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.update_backup in client._transport._wrapped_methods + assert client._transport.create_backup in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.update_backup] = mock_rpc + client._transport._wrapped_methods[client._transport.create_backup] = mock_rpc request = {} - client.update_backup(request) + client.create_backup(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 @@ -8925,7 +9555,7 @@ def test_update_backup_use_cached_wrapped_rpc(): # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.update_backup(request) + client.create_backup(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -8933,7 +9563,7 @@ def test_update_backup_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_update_backup_async_use_cached_wrapped_rpc( +async def test_create_backup_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -8950,7 +9580,7 @@ async def test_update_backup_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.update_backup + client._client._transport.create_backup in client._client._transport._wrapped_methods ) @@ -8958,11 +9588,11 @@ async def test_update_backup_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.update_backup + client._client._transport.create_backup ] = mock_rpc request = {} - await client.update_backup(request) + await client.create_backup(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 @@ -8972,7 +9602,7 @@ async def test_update_backup_async_use_cached_wrapped_rpc( # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - await client.update_backup(request) + await client.create_backup(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -8980,8 +9610,8 @@ async def test_update_backup_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_update_backup_async( - transport: str = "grpc_asyncio", request_type=service.UpdateBackupRequest +async def test_create_backup_async( + transport: str = "grpc_asyncio", request_type=service.CreateBackupRequest ): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), @@ -8993,17 +9623,17 @@ async def test_update_backup_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_backup), "__call__") as call: + with mock.patch.object(type(client.transport.create_backup), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.update_backup(request) + response = await client.create_backup(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = service.UpdateBackupRequest() + request = service.CreateBackupRequest() assert args[0] == request # Establish that the response is the type that we expect. 
@@ -9011,25 +9641,25 @@ async def test_update_backup_async( @pytest.mark.asyncio -async def test_update_backup_async_from_dict(): - await test_update_backup_async(request_type=dict) +async def test_create_backup_async_from_dict(): + await test_create_backup_async(request_type=dict) -def test_update_backup_field_headers(): +def test_create_backup_field_headers(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.UpdateBackupRequest() + request = service.CreateBackupRequest() - request.backup.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_backup), "__call__") as call: + with mock.patch.object(type(client.transport.create_backup), "__call__") as call: call.return_value = operations_pb2.Operation(name="operations/op") - client.update_backup(request) + client.create_backup(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -9040,28 +9670,28 @@ def test_update_backup_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "backup.name=name_value", + "parent=parent_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_update_backup_field_headers_async(): +async def test_create_backup_field_headers_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.UpdateBackupRequest() + request = service.CreateBackupRequest() - request.backup.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.update_backup), "__call__") as call: + with mock.patch.object(type(client.transport.create_backup), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) - await client.update_backup(request) + await client.create_backup(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -9072,39 +9702,43 @@ async def test_update_backup_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "backup.name=name_value", + "parent=parent_value", ) in kw["metadata"] -def test_update_backup_flattened(): +def test_create_backup_flattened(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_backup), "__call__") as call: + with mock.patch.object(type(client.transport.create_backup), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.update_backup( + client.create_backup( + parent="parent_value", backup=resources.Backup(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + backup_id="backup_id_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val arg = args[0].backup mock_val = resources.Backup(name="name_value") assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + arg = args[0].backup_id + mock_val = "backup_id_value" assert arg == mock_val -def test_update_backup_flattened_error(): +def test_create_backup_flattened_error(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -9112,21 +9746,22 @@ def test_update_backup_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_backup( - service.UpdateBackupRequest(), + client.create_backup( + service.CreateBackupRequest(), + parent="parent_value", backup=resources.Backup(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + backup_id="backup_id_value", ) @pytest.mark.asyncio -async def test_update_backup_flattened_async(): +async def test_create_backup_flattened_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_backup), "__call__") as call: + with mock.patch.object(type(client.transport.create_backup), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") @@ -9135,25 +9770,29 @@ async def test_update_backup_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.update_backup( + response = await client.create_backup( + parent="parent_value", backup=resources.Backup(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + backup_id="backup_id_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val arg = args[0].backup mock_val = resources.Backup(name="name_value") assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + arg = args[0].backup_id + mock_val = "backup_id_value" assert arg == mock_val @pytest.mark.asyncio -async def test_update_backup_flattened_error_async(): +async def test_create_backup_flattened_error_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) @@ -9161,21 +9800,22 @@ async def test_update_backup_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.update_backup( - service.UpdateBackupRequest(), + await client.create_backup( + service.CreateBackupRequest(), + parent="parent_value", backup=resources.Backup(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + backup_id="backup_id_value", ) @pytest.mark.parametrize( "request_type", [ - service.DeleteBackupRequest, + service.UpdateBackupRequest, dict, ], ) -def test_delete_backup(request_type, transport: str = "grpc"): +def test_update_backup(request_type, transport: str = "grpc"): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -9186,22 +9826,22 @@ def test_delete_backup(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: + with mock.patch.object(type(client.transport.update_backup), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.delete_backup(request) + response = client.update_backup(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = service.DeleteBackupRequest() + request = service.UpdateBackupRequest() assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) -def test_delete_backup_non_empty_request_with_auto_populated_field(): +def test_update_backup_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AlloyDBAdminClient( @@ -9212,28 +9852,24 @@ def test_delete_backup_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = service.DeleteBackupRequest( - name="name_value", + request = service.UpdateBackupRequest( request_id="request_id_value", - etag="etag_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: + with mock.patch.object(type(client.transport.update_backup), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.delete_backup(request=request) + client.update_backup(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.DeleteBackupRequest( - name="name_value", + assert args[0] == service.UpdateBackupRequest( request_id="request_id_value", - etag="etag_value", ) -def test_delete_backup_use_cached_wrapped_rpc(): +def test_update_backup_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -9247,16 +9883,16 @@ def test_delete_backup_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.delete_backup in client._transport._wrapped_methods + assert client._transport.update_backup in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.delete_backup] = mock_rpc + client._transport._wrapped_methods[client._transport.update_backup] = mock_rpc request = {} - client.delete_backup(request) + client.update_backup(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 @@ -9266,7 +9902,7 @@ def test_delete_backup_use_cached_wrapped_rpc(): # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.delete_backup(request) + client.update_backup(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -9274,7 +9910,7 @@ def test_delete_backup_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_delete_backup_async_use_cached_wrapped_rpc( +async def test_update_backup_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -9291,7 +9927,7 @@ async def test_delete_backup_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.delete_backup + client._client._transport.update_backup in client._client._transport._wrapped_methods ) @@ -9299,11 +9935,11 @@ async def test_delete_backup_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.delete_backup + client._client._transport.update_backup ] = mock_rpc request = {} - await client.delete_backup(request) + await client.update_backup(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 @@ -9313,7 +9949,7 @@ async def test_delete_backup_async_use_cached_wrapped_rpc( # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - await client.delete_backup(request) + await client.update_backup(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -9321,8 +9957,8 @@ async def test_delete_backup_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_delete_backup_async( - transport: str = "grpc_asyncio", request_type=service.DeleteBackupRequest +async def test_update_backup_async( + transport: str = "grpc_asyncio", request_type=service.UpdateBackupRequest ): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), @@ -9334,17 +9970,17 @@ async def test_delete_backup_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: + with mock.patch.object(type(client.transport.update_backup), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.delete_backup(request) + response = await client.update_backup(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = service.DeleteBackupRequest() + request = service.UpdateBackupRequest() assert args[0] == request # Establish that the response is the type that we expect. 
@@ -9352,25 +9988,25 @@ async def test_delete_backup_async( @pytest.mark.asyncio -async def test_delete_backup_async_from_dict(): - await test_delete_backup_async(request_type=dict) +async def test_update_backup_async_from_dict(): + await test_update_backup_async(request_type=dict) -def test_delete_backup_field_headers(): +def test_update_backup_field_headers(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.DeleteBackupRequest() + request = service.UpdateBackupRequest() - request.name = "name_value" + request.backup.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: + with mock.patch.object(type(client.transport.update_backup), "__call__") as call: call.return_value = operations_pb2.Operation(name="operations/op") - client.delete_backup(request) + client.update_backup(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -9381,28 +10017,28 @@ def test_delete_backup_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "backup.name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_delete_backup_field_headers_async(): +async def test_update_backup_field_headers_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.DeleteBackupRequest() + request = service.UpdateBackupRequest() - request.name = "name_value" + request.backup.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: + with mock.patch.object(type(client.transport.update_backup), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) - await client.delete_backup(request) + await client.update_backup(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -9413,35 +10049,39 @@ async def test_delete_backup_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "backup.name=name_value", ) in kw["metadata"] -def test_delete_backup_flattened(): +def test_update_backup_flattened(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: + with mock.patch.object(type(client.transport.update_backup), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.delete_backup( - name="name_value", + client.update_backup( + backup=resources.Backup(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].backup + mock_val = resources.Backup(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val -def test_delete_backup_flattened_error(): +def test_update_backup_flattened_error(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -9449,20 +10089,21 @@ def test_delete_backup_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete_backup( - service.DeleteBackupRequest(), - name="name_value", + client.update_backup( + service.UpdateBackupRequest(), + backup=resources.Backup(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio -async def test_delete_backup_flattened_async(): +async def test_update_backup_flattened_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: + with mock.patch.object(type(client.transport.update_backup), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") @@ -9471,21 +10112,25 @@ async def test_delete_backup_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.delete_backup( - name="name_value", + response = await client.update_backup( + backup=resources.Backup(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].backup + mock_val = resources.Backup(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val @pytest.mark.asyncio -async def test_delete_backup_flattened_error_async(): +async def test_update_backup_flattened_error_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) @@ -9493,20 +10138,21 @@ async def test_delete_backup_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.delete_backup( - service.DeleteBackupRequest(), - name="name_value", + await client.update_backup( + service.UpdateBackupRequest(), + backup=resources.Backup(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.parametrize( "request_type", [ - service.ListSupportedDatabaseFlagsRequest, + service.DeleteBackupRequest, dict, ], ) -def test_list_supported_database_flags(request_type, transport: str = "grpc"): +def test_delete_backup(request_type, transport: str = "grpc"): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -9517,27 +10163,22 @@ def test_list_supported_database_flags(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_supported_database_flags), "__call__" - ) as call: + with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = service.ListSupportedDatabaseFlagsResponse( - next_page_token="next_page_token_value", - ) - response = client.list_supported_database_flags(request) + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_backup(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = service.ListSupportedDatabaseFlagsRequest() + request = service.DeleteBackupRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListSupportedDatabaseFlagsPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, future.Future) -def test_list_supported_database_flags_non_empty_request_with_auto_populated_field(): +def test_delete_backup_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AlloyDBAdminClient( @@ -9548,28 +10189,28 @@ def test_list_supported_database_flags_non_empty_request_with_auto_populated_fie # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = service.ListSupportedDatabaseFlagsRequest( - parent="parent_value", - page_token="page_token_value", + request = service.DeleteBackupRequest( + name="name_value", + request_id="request_id_value", + etag="etag_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_supported_database_flags), "__call__" - ) as call: + with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.list_supported_database_flags(request=request) + client.delete_backup(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.ListSupportedDatabaseFlagsRequest( - parent="parent_value", - page_token="page_token_value", + assert args[0] == service.DeleteBackupRequest( + name="name_value", + request_id="request_id_value", + etag="etag_value", ) -def test_list_supported_database_flags_use_cached_wrapped_rpc(): +def test_delete_backup_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -9583,26 +10224,26 @@ def test_list_supported_database_flags_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.list_supported_database_flags - in client._transport._wrapped_methods - ) + assert client._transport.delete_backup in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.list_supported_database_flags - ] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_backup] = mock_rpc request = {} - client.list_supported_database_flags(request) + client.delete_backup(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_supported_database_flags(request) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_backup(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -9610,7 +10251,7 @@ def test_list_supported_database_flags_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_list_supported_database_flags_async_use_cached_wrapped_rpc( +async def test_delete_backup_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -9627,7 +10268,7 @@ async def test_list_supported_database_flags_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.list_supported_database_flags + client._client._transport.delete_backup in client._client._transport._wrapped_methods ) @@ -9635,16 +10276,21 @@ async def test_list_supported_database_flags_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.list_supported_database_flags + client._client._transport.delete_backup ] = mock_rpc request = {} - await client.list_supported_database_flags(request) + await client.delete_backup(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - await client.list_supported_database_flags(request) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_backup(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -9652,9 +10298,8 @@ async def test_list_supported_database_flags_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_list_supported_database_flags_async( - transport: str = "grpc_asyncio", - request_type=service.ListSupportedDatabaseFlagsRequest, +async def test_delete_backup_async( + transport: str = "grpc_asyncio", request_type=service.DeleteBackupRequest ): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), @@ -9666,50 +10311,43 @@ async def test_list_supported_database_flags_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_supported_database_flags), "__call__" - ) as call: + with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - service.ListSupportedDatabaseFlagsResponse( - next_page_token="next_page_token_value", - ) + operations_pb2.Operation(name="operations/spam") ) - response = await client.list_supported_database_flags(request) + response = await client.delete_backup(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = service.ListSupportedDatabaseFlagsRequest() + request = service.DeleteBackupRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListSupportedDatabaseFlagsAsyncPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, future.Future) @pytest.mark.asyncio -async def test_list_supported_database_flags_async_from_dict(): - await test_list_supported_database_flags_async(request_type=dict) +async def test_delete_backup_async_from_dict(): + await test_delete_backup_async(request_type=dict) -def test_list_supported_database_flags_field_headers(): +def test_delete_backup_field_headers(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.ListSupportedDatabaseFlagsRequest() + request = service.DeleteBackupRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_supported_database_flags), "__call__" - ) as call: - call.return_value = service.ListSupportedDatabaseFlagsResponse() - client.list_supported_database_flags(request) + with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_backup(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -9720,30 +10358,28 @@ def test_list_supported_database_flags_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_list_supported_database_flags_field_headers_async(): +async def test_delete_backup_field_headers_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. 
Set these to a non-empty value. - request = service.ListSupportedDatabaseFlagsRequest() + request = service.DeleteBackupRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_supported_database_flags), "__call__" - ) as call: + with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - service.ListSupportedDatabaseFlagsResponse() + operations_pb2.Operation(name="operations/op") ) - await client.list_supported_database_flags(request) + await client.delete_backup(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -9754,37 +10390,35 @@ async def test_list_supported_database_flags_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] -def test_list_supported_database_flags_flattened(): +def test_delete_backup_flattened(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_supported_database_flags), "__call__" - ) as call: + with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = service.ListSupportedDatabaseFlagsResponse() + call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_supported_database_flags( - parent="parent_value", + client.delete_backup( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val -def test_list_supported_database_flags_flattened_error(): +def test_delete_backup_flattened_error(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -9792,45 +10426,43 @@ def test_list_supported_database_flags_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_supported_database_flags( - service.ListSupportedDatabaseFlagsRequest(), - parent="parent_value", + client.delete_backup( + service.DeleteBackupRequest(), + name="name_value", ) @pytest.mark.asyncio -async def test_list_supported_database_flags_flattened_async(): +async def test_delete_backup_flattened_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_supported_database_flags), "__call__" - ) as call: + with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = service.ListSupportedDatabaseFlagsResponse() + call.return_value = operations_pb2.Operation(name="operations/op") call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - service.ListSupportedDatabaseFlagsResponse() + operations_pb2.Operation(name="operations/spam") ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_supported_database_flags( - parent="parent_value", + response = await client.delete_backup( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio -async def test_list_supported_database_flags_flattened_error_async(): +async def test_delete_backup_flattened_error_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) @@ -9838,224 +10470,20 @@ async def test_list_supported_database_flags_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.list_supported_database_flags( - service.ListSupportedDatabaseFlagsRequest(), - parent="parent_value", + await client.delete_backup( + service.DeleteBackupRequest(), + name="name_value", ) -def test_list_supported_database_flags_pager(transport_name: str = "grpc"): - client = AlloyDBAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_supported_database_flags), "__call__" - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - service.ListSupportedDatabaseFlagsResponse( - supported_database_flags=[ - resources.SupportedDatabaseFlag(), - resources.SupportedDatabaseFlag(), - resources.SupportedDatabaseFlag(), - ], - next_page_token="abc", - ), - service.ListSupportedDatabaseFlagsResponse( - supported_database_flags=[], - next_page_token="def", - ), - service.ListSupportedDatabaseFlagsResponse( - supported_database_flags=[ - resources.SupportedDatabaseFlag(), - ], - next_page_token="ghi", - ), - service.ListSupportedDatabaseFlagsResponse( - supported_database_flags=[ - resources.SupportedDatabaseFlag(), - resources.SupportedDatabaseFlag(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), - ) - pager = client.list_supported_database_flags( - request={}, retry=retry, timeout=timeout - ) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, resources.SupportedDatabaseFlag) for i in results) - - -def test_list_supported_database_flags_pages(transport_name: str = "grpc"): - client = AlloyDBAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_supported_database_flags), "__call__" - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - service.ListSupportedDatabaseFlagsResponse( - supported_database_flags=[ - resources.SupportedDatabaseFlag(), - resources.SupportedDatabaseFlag(), - resources.SupportedDatabaseFlag(), - ], - next_page_token="abc", - ), - service.ListSupportedDatabaseFlagsResponse( - supported_database_flags=[], - next_page_token="def", - ), - service.ListSupportedDatabaseFlagsResponse( - supported_database_flags=[ - resources.SupportedDatabaseFlag(), - ], - next_page_token="ghi", - ), - service.ListSupportedDatabaseFlagsResponse( - supported_database_flags=[ - resources.SupportedDatabaseFlag(), - resources.SupportedDatabaseFlag(), - ], - ), - RuntimeError, - ) - pages = list(client.list_supported_database_flags(request={}).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.asyncio -async def test_list_supported_database_flags_async_pager(): - client = AlloyDBAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_supported_database_flags), - "__call__", - new_callable=mock.AsyncMock, - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - service.ListSupportedDatabaseFlagsResponse( - supported_database_flags=[ - resources.SupportedDatabaseFlag(), - resources.SupportedDatabaseFlag(), - resources.SupportedDatabaseFlag(), - ], - next_page_token="abc", - ), - service.ListSupportedDatabaseFlagsResponse( - supported_database_flags=[], - next_page_token="def", - ), - service.ListSupportedDatabaseFlagsResponse( - supported_database_flags=[ - resources.SupportedDatabaseFlag(), - ], - next_page_token="ghi", - ), - service.ListSupportedDatabaseFlagsResponse( - supported_database_flags=[ - resources.SupportedDatabaseFlag(), - resources.SupportedDatabaseFlag(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_supported_database_flags( - request={}, - ) - assert async_pager.next_page_token == "abc" - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, resources.SupportedDatabaseFlag) for i in responses) - - -@pytest.mark.asyncio -async def test_list_supported_database_flags_async_pages(): - client = AlloyDBAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_supported_database_flags), - "__call__", - new_callable=mock.AsyncMock, - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - service.ListSupportedDatabaseFlagsResponse( - supported_database_flags=[ - resources.SupportedDatabaseFlag(), - resources.SupportedDatabaseFlag(), - resources.SupportedDatabaseFlag(), - ], - next_page_token="abc", - ), - service.ListSupportedDatabaseFlagsResponse( - supported_database_flags=[], - next_page_token="def", - ), - service.ListSupportedDatabaseFlagsResponse( - supported_database_flags=[ - resources.SupportedDatabaseFlag(), - ], - next_page_token="ghi", - ), - service.ListSupportedDatabaseFlagsResponse( - supported_database_flags=[ - resources.SupportedDatabaseFlag(), - resources.SupportedDatabaseFlag(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_supported_database_flags(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - @pytest.mark.parametrize( "request_type", [ - service.GenerateClientCertificateRequest, + service.ListSupportedDatabaseFlagsRequest, dict, ], ) -def test_generate_client_certificate(request_type, transport: str = "grpc"): +def test_list_supported_database_flags(request_type, transport: str = "grpc"): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -10067,30 +10495,26 @@ def test_generate_client_certificate(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.generate_client_certificate), "__call__" + type(client.transport.list_supported_database_flags), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = service.GenerateClientCertificateResponse( - pem_certificate="pem_certificate_value", - pem_certificate_chain=["pem_certificate_chain_value"], - ca_cert="ca_cert_value", + call.return_value = service.ListSupportedDatabaseFlagsResponse( + next_page_token="next_page_token_value", ) - response = client.generate_client_certificate(request) + response = client.list_supported_database_flags(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = service.GenerateClientCertificateRequest() + request = service.ListSupportedDatabaseFlagsRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, service.GenerateClientCertificateResponse) - assert response.pem_certificate == "pem_certificate_value" - assert response.pem_certificate_chain == ["pem_certificate_chain_value"] - assert response.ca_cert == "ca_cert_value" + assert isinstance(response, pagers.ListSupportedDatabaseFlagsPager) + assert response.next_page_token == "next_page_token_value" -def test_generate_client_certificate_non_empty_request_with_auto_populated_field(): +def test_list_supported_database_flags_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AlloyDBAdminClient( @@ -10101,32 +10525,28 @@ def test_generate_client_certificate_non_empty_request_with_auto_populated_field # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. 
- request = service.GenerateClientCertificateRequest( + request = service.ListSupportedDatabaseFlagsRequest( parent="parent_value", - request_id="request_id_value", - pem_csr="pem_csr_value", - public_key="public_key_value", + page_token="page_token_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.generate_client_certificate), "__call__" + type(client.transport.list_supported_database_flags), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.generate_client_certificate(request=request) + client.list_supported_database_flags(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.GenerateClientCertificateRequest( + assert args[0] == service.ListSupportedDatabaseFlagsRequest( parent="parent_value", - request_id="request_id_value", - pem_csr="pem_csr_value", - public_key="public_key_value", + page_token="page_token_value", ) -def test_generate_client_certificate_use_cached_wrapped_rpc(): +def test_list_supported_database_flags_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -10141,7 +10561,7 @@ def test_generate_client_certificate_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.generate_client_certificate + client._transport.list_supported_database_flags in client._transport._wrapped_methods ) @@ -10151,15 +10571,15 @@ def test_generate_client_certificate_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. 
) client._transport._wrapped_methods[ - client._transport.generate_client_certificate + client._transport.list_supported_database_flags ] = mock_rpc request = {} - client.generate_client_certificate(request) + client.list_supported_database_flags(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.generate_client_certificate(request) + client.list_supported_database_flags(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -10167,7 +10587,7 @@ def test_generate_client_certificate_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_generate_client_certificate_async_use_cached_wrapped_rpc( +async def test_list_supported_database_flags_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -10184,7 +10604,7 @@ async def test_generate_client_certificate_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.generate_client_certificate + client._client._transport.list_supported_database_flags in client._client._transport._wrapped_methods ) @@ -10192,16 +10612,16 @@ async def test_generate_client_certificate_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.generate_client_certificate + client._client._transport.list_supported_database_flags ] = mock_rpc request = {} - await client.generate_client_certificate(request) + await client.list_supported_database_flags(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - await client.generate_client_certificate(request) + await client.list_supported_database_flags(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -10209,9 +10629,9 @@ async def test_generate_client_certificate_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_generate_client_certificate_async( +async def test_list_supported_database_flags_async( transport: str = "grpc_asyncio", - request_type=service.GenerateClientCertificateRequest, + request_type=service.ListSupportedDatabaseFlagsRequest, ): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), @@ -10224,53 +10644,49 @@ async def test_generate_client_certificate_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.generate_client_certificate), "__call__" + type(client.transport.list_supported_database_flags), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - service.GenerateClientCertificateResponse( - pem_certificate="pem_certificate_value", - pem_certificate_chain=["pem_certificate_chain_value"], - ca_cert="ca_cert_value", + service.ListSupportedDatabaseFlagsResponse( + next_page_token="next_page_token_value", ) ) - response = await client.generate_client_certificate(request) + response = await client.list_supported_database_flags(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = service.GenerateClientCertificateRequest() + request = service.ListSupportedDatabaseFlagsRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, service.GenerateClientCertificateResponse) - assert response.pem_certificate == "pem_certificate_value" - assert response.pem_certificate_chain == ["pem_certificate_chain_value"] - assert response.ca_cert == "ca_cert_value" + assert isinstance(response, pagers.ListSupportedDatabaseFlagsAsyncPager) + assert response.next_page_token == "next_page_token_value" @pytest.mark.asyncio -async def test_generate_client_certificate_async_from_dict(): - await test_generate_client_certificate_async(request_type=dict) +async def test_list_supported_database_flags_async_from_dict(): + await test_list_supported_database_flags_async(request_type=dict) -def test_generate_client_certificate_field_headers(): +def test_list_supported_database_flags_field_headers(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.GenerateClientCertificateRequest() + request = service.ListSupportedDatabaseFlagsRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.generate_client_certificate), "__call__" + type(client.transport.list_supported_database_flags), "__call__" ) as call: - call.return_value = service.GenerateClientCertificateResponse() - client.generate_client_certificate(request) + call.return_value = service.ListSupportedDatabaseFlagsResponse() + client.list_supported_database_flags(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -10286,25 +10702,25 @@ def test_generate_client_certificate_field_headers(): @pytest.mark.asyncio -async def test_generate_client_certificate_field_headers_async(): +async def test_list_supported_database_flags_field_headers_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.GenerateClientCertificateRequest() + request = service.ListSupportedDatabaseFlagsRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.generate_client_certificate), "__call__" + type(client.transport.list_supported_database_flags), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - service.GenerateClientCertificateResponse() + service.ListSupportedDatabaseFlagsResponse() ) - await client.generate_client_certificate(request) + await client.list_supported_database_flags(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -10319,20 +10735,20 @@ async def test_generate_client_certificate_field_headers_async(): ) in kw["metadata"] -def test_generate_client_certificate_flattened(): +def test_list_supported_database_flags_flattened(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.generate_client_certificate), "__call__" + type(client.transport.list_supported_database_flags), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = service.GenerateClientCertificateResponse() + call.return_value = service.ListSupportedDatabaseFlagsResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.generate_client_certificate( + client.list_supported_database_flags( parent="parent_value", ) @@ -10345,7 +10761,7 @@ def test_generate_client_certificate_flattened(): assert arg == mock_val -def test_generate_client_certificate_flattened_error(): +def test_list_supported_database_flags_flattened_error(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -10353,31 +10769,31 @@ def test_generate_client_certificate_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.generate_client_certificate( - service.GenerateClientCertificateRequest(), + client.list_supported_database_flags( + service.ListSupportedDatabaseFlagsRequest(), parent="parent_value", ) @pytest.mark.asyncio -async def test_generate_client_certificate_flattened_async(): +async def test_list_supported_database_flags_flattened_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.generate_client_certificate), "__call__" + type(client.transport.list_supported_database_flags), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = service.GenerateClientCertificateResponse() + call.return_value = service.ListSupportedDatabaseFlagsResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - service.GenerateClientCertificateResponse() + service.ListSupportedDatabaseFlagsResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.generate_client_certificate( + response = await client.list_supported_database_flags( parent="parent_value", ) @@ -10391,7 +10807,7 @@ async def test_generate_client_certificate_flattened_async(): @pytest.mark.asyncio -async def test_generate_client_certificate_flattened_error_async(): +async def test_list_supported_database_flags_flattened_error_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) @@ -10399,125 +10815,328 @@ async def test_generate_client_certificate_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.generate_client_certificate( - service.GenerateClientCertificateRequest(), + await client.list_supported_database_flags( + service.ListSupportedDatabaseFlagsRequest(), parent="parent_value", ) -@pytest.mark.parametrize( - "request_type", - [ - service.GetConnectionInfoRequest, - dict, - ], -) -def test_get_connection_info(request_type, transport: str = "grpc"): +def test_list_supported_database_flags_pager(transport_name: str = "grpc"): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport=transport_name, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_connection_info), "__call__" + type(client.transport.list_supported_database_flags), "__call__" ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = resources.ConnectionInfo( - name="name_value", - ip_address="ip_address_value", - public_ip_address="public_ip_address_value", - pem_certificate_chain=["pem_certificate_chain_value"], - instance_uid="instance_uid_value", - psc_dns_name="psc_dns_name_value", + # Set the response to a series of pages. + call.side_effect = ( + service.ListSupportedDatabaseFlagsResponse( + supported_database_flags=[ + resources.SupportedDatabaseFlag(), + resources.SupportedDatabaseFlag(), + resources.SupportedDatabaseFlag(), + ], + next_page_token="abc", + ), + service.ListSupportedDatabaseFlagsResponse( + supported_database_flags=[], + next_page_token="def", + ), + service.ListSupportedDatabaseFlagsResponse( + supported_database_flags=[ + resources.SupportedDatabaseFlag(), + ], + next_page_token="ghi", + ), + service.ListSupportedDatabaseFlagsResponse( + supported_database_flags=[ + resources.SupportedDatabaseFlag(), + resources.SupportedDatabaseFlag(), + ], + ), + RuntimeError, ) - response = client.get_connection_info(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = service.GetConnectionInfoRequest() - assert args[0] == request + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_supported_database_flags( + request={}, retry=retry, timeout=timeout + ) - # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.ConnectionInfo) - assert response.name == "name_value" - assert response.ip_address == "ip_address_value" - assert response.public_ip_address == "public_ip_address_value" - assert response.pem_certificate_chain == ["pem_certificate_chain_value"] - assert response.instance_uid == "instance_uid_value" - assert response.psc_dns_name == "psc_dns_name_value" + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.SupportedDatabaseFlag) for i in results) -def test_get_connection_info_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. +def test_list_supported_database_flags_pages(transport_name: str = "grpc"): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = service.GetConnectionInfoRequest( - parent="parent_value", - request_id="request_id_value", + transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_connection_info), "__call__" + type(client.transport.list_supported_database_flags), "__call__" ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.get_connection_info(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.GetConnectionInfoRequest( - parent="parent_value", - request_id="request_id_value", - ) - - -def test_get_connection_info_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AlloyDBAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._transport.get_connection_info in client._transport._wrapped_methods + # Set the response to a series of pages. + call.side_effect = ( + service.ListSupportedDatabaseFlagsResponse( + supported_database_flags=[ + resources.SupportedDatabaseFlag(), + resources.SupportedDatabaseFlag(), + resources.SupportedDatabaseFlag(), + ], + next_page_token="abc", + ), + service.ListSupportedDatabaseFlagsResponse( + supported_database_flags=[], + next_page_token="def", + ), + service.ListSupportedDatabaseFlagsResponse( + supported_database_flags=[ + resources.SupportedDatabaseFlag(), + ], + next_page_token="ghi", + ), + service.ListSupportedDatabaseFlagsResponse( + supported_database_flags=[ + resources.SupportedDatabaseFlag(), + resources.SupportedDatabaseFlag(), + ], + ), + RuntimeError, ) + pages = list(client.list_supported_database_flags(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client._transport._wrapped_methods[ - client._transport.get_connection_info - ] = mock_rpc - request = {} - client.get_connection_info(request) - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 +@pytest.mark.asyncio +async def test_list_supported_database_flags_async_pager(): + client = AlloyDBAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) - client.get_connection_info(request) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_supported_database_flags), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListSupportedDatabaseFlagsResponse( + supported_database_flags=[ + resources.SupportedDatabaseFlag(), + resources.SupportedDatabaseFlag(), + resources.SupportedDatabaseFlag(), + ], + next_page_token="abc", + ), + service.ListSupportedDatabaseFlagsResponse( + supported_database_flags=[], + next_page_token="def", + ), + service.ListSupportedDatabaseFlagsResponse( + supported_database_flags=[ + resources.SupportedDatabaseFlag(), + ], + next_page_token="ghi", + ), + service.ListSupportedDatabaseFlagsResponse( + supported_database_flags=[ + resources.SupportedDatabaseFlag(), + resources.SupportedDatabaseFlag(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_supported_database_flags( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, resources.SupportedDatabaseFlag) for i in responses) + + +@pytest.mark.asyncio +async def test_list_supported_database_flags_async_pages(): + client = AlloyDBAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_supported_database_flags), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListSupportedDatabaseFlagsResponse( + supported_database_flags=[ + resources.SupportedDatabaseFlag(), + resources.SupportedDatabaseFlag(), + resources.SupportedDatabaseFlag(), + ], + next_page_token="abc", + ), + service.ListSupportedDatabaseFlagsResponse( + supported_database_flags=[], + next_page_token="def", + ), + service.ListSupportedDatabaseFlagsResponse( + supported_database_flags=[ + resources.SupportedDatabaseFlag(), + ], + next_page_token="ghi", + ), + service.ListSupportedDatabaseFlagsResponse( + supported_database_flags=[ + resources.SupportedDatabaseFlag(), + resources.SupportedDatabaseFlag(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_supported_database_flags(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + service.GenerateClientCertificateRequest, + dict, + ], +) +def test_generate_client_certificate(request_type, transport: str = "grpc"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.generate_client_certificate), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = service.GenerateClientCertificateResponse( + pem_certificate="pem_certificate_value", + pem_certificate_chain=["pem_certificate_chain_value"], + ca_cert="ca_cert_value", + ) + response = client.generate_client_certificate(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = service.GenerateClientCertificateRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, service.GenerateClientCertificateResponse) + assert response.pem_certificate == "pem_certificate_value" + assert response.pem_certificate_chain == ["pem_certificate_chain_value"] + assert response.ca_cert == "ca_cert_value" + + +def test_generate_client_certificate_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service.GenerateClientCertificateRequest( + parent="parent_value", + request_id="request_id_value", + pem_csr="pem_csr_value", + public_key="public_key_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.generate_client_certificate), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.generate_client_certificate(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.GenerateClientCertificateRequest( + parent="parent_value", + request_id="request_id_value", + pem_csr="pem_csr_value", + public_key="public_key_value", + ) + + +def test_generate_client_certificate_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.generate_client_certificate + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.generate_client_certificate + ] = mock_rpc + request = {} + client.generate_client_certificate(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.generate_client_certificate(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -10525,7 +11144,7 @@ def test_get_connection_info_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_get_connection_info_async_use_cached_wrapped_rpc( +async def test_generate_client_certificate_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -10542,7 +11161,7 @@ async def test_get_connection_info_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.get_connection_info + client._client._transport.generate_client_certificate in client._client._transport._wrapped_methods ) @@ -10550,16 +11169,16 @@ async def test_get_connection_info_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.get_connection_info + client._client._transport.generate_client_certificate ] = mock_rpc request = {} - await client.get_connection_info(request) + await client.generate_client_certificate(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - await client.get_connection_info(request) + await client.generate_client_certificate(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -10567,8 +11186,9 @@ async def test_get_connection_info_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_get_connection_info_async( - transport: str = "grpc_asyncio", request_type=service.GetConnectionInfoRequest +async def test_generate_client_certificate_async( + transport: str = "grpc_asyncio", + request_type=service.GenerateClientCertificateRequest, ): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), @@ -10581,59 +11201,53 @@ async def test_get_connection_info_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_connection_info), "__call__" + type(client.transport.generate_client_certificate), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.ConnectionInfo( - name="name_value", - ip_address="ip_address_value", - public_ip_address="public_ip_address_value", + service.GenerateClientCertificateResponse( + pem_certificate="pem_certificate_value", pem_certificate_chain=["pem_certificate_chain_value"], - instance_uid="instance_uid_value", - psc_dns_name="psc_dns_name_value", + ca_cert="ca_cert_value", ) ) - response = await client.get_connection_info(request) + response = await client.generate_client_certificate(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = service.GetConnectionInfoRequest() + request = service.GenerateClientCertificateRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.ConnectionInfo) - assert response.name == "name_value" - assert response.ip_address == "ip_address_value" - assert response.public_ip_address == "public_ip_address_value" + assert isinstance(response, service.GenerateClientCertificateResponse) + assert response.pem_certificate == "pem_certificate_value" assert response.pem_certificate_chain == ["pem_certificate_chain_value"] - assert response.instance_uid == "instance_uid_value" - assert response.psc_dns_name == "psc_dns_name_value" + assert response.ca_cert == "ca_cert_value" @pytest.mark.asyncio -async def test_get_connection_info_async_from_dict(): - await test_get_connection_info_async(request_type=dict) +async def test_generate_client_certificate_async_from_dict(): + await test_generate_client_certificate_async(request_type=dict) -def test_get_connection_info_field_headers(): +def test_generate_client_certificate_field_headers(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.GetConnectionInfoRequest() + request = service.GenerateClientCertificateRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_connection_info), "__call__" + type(client.transport.generate_client_certificate), "__call__" ) as call: - call.return_value = resources.ConnectionInfo() - client.get_connection_info(request) + call.return_value = service.GenerateClientCertificateResponse() + client.generate_client_certificate(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -10649,25 +11263,25 @@ def test_get_connection_info_field_headers(): @pytest.mark.asyncio -async def test_get_connection_info_field_headers_async(): +async def test_generate_client_certificate_field_headers_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.GetConnectionInfoRequest() + request = service.GenerateClientCertificateRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_connection_info), "__call__" + type(client.transport.generate_client_certificate), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.ConnectionInfo() + service.GenerateClientCertificateResponse() ) - await client.get_connection_info(request) + await client.generate_client_certificate(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -10682,20 +11296,20 @@ async def test_get_connection_info_field_headers_async(): ) in kw["metadata"] -def test_get_connection_info_flattened(): +def test_generate_client_certificate_flattened(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_connection_info), "__call__" + type(client.transport.generate_client_certificate), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.ConnectionInfo() + call.return_value = service.GenerateClientCertificateResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- client.get_connection_info( + client.generate_client_certificate( parent="parent_value", ) @@ -10708,7 +11322,7 @@ def test_get_connection_info_flattened(): assert arg == mock_val -def test_get_connection_info_flattened_error(): +def test_generate_client_certificate_flattened_error(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -10716,31 +11330,31 @@ def test_get_connection_info_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_connection_info( - service.GetConnectionInfoRequest(), + client.generate_client_certificate( + service.GenerateClientCertificateRequest(), parent="parent_value", ) @pytest.mark.asyncio -async def test_get_connection_info_flattened_async(): +async def test_generate_client_certificate_flattened_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_connection_info), "__call__" + type(client.transport.generate_client_certificate), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.ConnectionInfo() + call.return_value = service.GenerateClientCertificateResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.ConnectionInfo() + service.GenerateClientCertificateResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.get_connection_info( + response = await client.generate_client_certificate( parent="parent_value", ) @@ -10754,7 +11368,7 @@ async def test_get_connection_info_flattened_async(): @pytest.mark.asyncio -async def test_get_connection_info_flattened_error_async(): +async def test_generate_client_certificate_flattened_error_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) @@ -10762,8 +11376,8 @@ async def test_get_connection_info_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.get_connection_info( - service.GetConnectionInfoRequest(), + await client.generate_client_certificate( + service.GenerateClientCertificateRequest(), parent="parent_value", ) @@ -10771,11 +11385,11 @@ async def test_get_connection_info_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - service.ListUsersRequest, + service.GetConnectionInfoRequest, dict, ], ) -def test_list_users(request_type, transport: str = "grpc"): +def test_get_connection_info(request_type, transport: str = "grpc"): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -10786,27 +11400,37 @@ def test_list_users(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_users), "__call__") as call: + with mock.patch.object( + type(client.transport.get_connection_info), "__call__" + ) as call: # Designate an appropriate return value for the call. 
- call.return_value = service.ListUsersResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], + call.return_value = resources.ConnectionInfo( + name="name_value", + ip_address="ip_address_value", + public_ip_address="public_ip_address_value", + pem_certificate_chain=["pem_certificate_chain_value"], + instance_uid="instance_uid_value", + psc_dns_name="psc_dns_name_value", ) - response = client.list_users(request) + response = client.get_connection_info(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = service.ListUsersRequest() + request = service.GetConnectionInfoRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListUsersPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert isinstance(response, resources.ConnectionInfo) + assert response.name == "name_value" + assert response.ip_address == "ip_address_value" + assert response.public_ip_address == "public_ip_address_value" + assert response.pem_certificate_chain == ["pem_certificate_chain_value"] + assert response.instance_uid == "instance_uid_value" + assert response.psc_dns_name == "psc_dns_name_value" -def test_list_users_non_empty_request_with_auto_populated_field(): +def test_get_connection_info_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AlloyDBAdminClient( @@ -10817,30 +11441,28 @@ def test_list_users_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. 
- request = service.ListUsersRequest( + request = service.GetConnectionInfoRequest( parent="parent_value", - page_token="page_token_value", - filter="filter_value", - order_by="order_by_value", + request_id="request_id_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_users), "__call__") as call: + with mock.patch.object( + type(client.transport.get_connection_info), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.list_users(request=request) + client.get_connection_info(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.ListUsersRequest( + assert args[0] == service.GetConnectionInfoRequest( parent="parent_value", - page_token="page_token_value", - filter="filter_value", - order_by="order_by_value", + request_id="request_id_value", ) -def test_list_users_use_cached_wrapped_rpc(): +def test_get_connection_info_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -10854,21 +11476,25 @@ def test_list_users_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_users in client._transport._wrapped_methods + assert ( + client._transport.get_connection_info in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.list_users] = mock_rpc + client._transport._wrapped_methods[ + client._transport.get_connection_info + ] = mock_rpc request = {} - client.list_users(request) + client.get_connection_info(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_users(request) + client.get_connection_info(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -10876,7 +11502,9 @@ def test_list_users_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_list_users_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_get_connection_info_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -10891,7 +11519,7 @@ async def test_list_users_async_use_cached_wrapped_rpc(transport: str = "grpc_as # Ensure method has been cached assert ( - client._client._transport.list_users + client._client._transport.get_connection_info in client._client._transport._wrapped_methods ) @@ -10899,16 +11527,16 @@ async def test_list_users_async_use_cached_wrapped_rpc(transport: str = "grpc_as mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.list_users + client._client._transport.get_connection_info ] = mock_rpc request = {} - await client.list_users(request) + await client.get_connection_info(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - await client.list_users(request) + await client.get_connection_info(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -10916,8 +11544,8 @@ async def test_list_users_async_use_cached_wrapped_rpc(transport: str = "grpc_as @pytest.mark.asyncio -async def test_list_users_async( - transport: str = "grpc_asyncio", request_type=service.ListUsersRequest +async def test_get_connection_info_async( + transport: str = "grpc_asyncio", request_type=service.GetConnectionInfoRequest ): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), @@ -10929,48 +11557,60 @@ async def test_list_users_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_users), "__call__") as call: + with mock.patch.object( + type(client.transport.get_connection_info), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - service.ListUsersResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], + resources.ConnectionInfo( + name="name_value", + ip_address="ip_address_value", + public_ip_address="public_ip_address_value", + pem_certificate_chain=["pem_certificate_chain_value"], + instance_uid="instance_uid_value", + psc_dns_name="psc_dns_name_value", ) ) - response = await client.list_users(request) + response = await client.get_connection_info(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = service.ListUsersRequest() + request = service.GetConnectionInfoRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListUsersAsyncPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert isinstance(response, resources.ConnectionInfo) + assert response.name == "name_value" + assert response.ip_address == "ip_address_value" + assert response.public_ip_address == "public_ip_address_value" + assert response.pem_certificate_chain == ["pem_certificate_chain_value"] + assert response.instance_uid == "instance_uid_value" + assert response.psc_dns_name == "psc_dns_name_value" @pytest.mark.asyncio -async def test_list_users_async_from_dict(): - await test_list_users_async(request_type=dict) +async def test_get_connection_info_async_from_dict(): + await test_get_connection_info_async(request_type=dict) -def test_list_users_field_headers(): +def test_get_connection_info_field_headers(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.ListUsersRequest() + request = service.GetConnectionInfoRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_users), "__call__") as call: - call.return_value = service.ListUsersResponse() - client.list_users(request) + with mock.patch.object( + type(client.transport.get_connection_info), "__call__" + ) as call: + call.return_value = resources.ConnectionInfo() + client.get_connection_info(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -10986,23 +11626,25 @@ def test_list_users_field_headers(): @pytest.mark.asyncio -async def test_list_users_field_headers_async(): +async def test_get_connection_info_field_headers_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.ListUsersRequest() + request = service.GetConnectionInfoRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_users), "__call__") as call: + with mock.patch.object( + type(client.transport.get_connection_info), "__call__" + ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - service.ListUsersResponse() + resources.ConnectionInfo() ) - await client.list_users(request) + await client.get_connection_info(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -11017,18 +11659,20 @@ async def test_list_users_field_headers_async(): ) in kw["metadata"] -def test_list_users_flattened(): +def test_get_connection_info_flattened(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_users), "__call__") as call: + with mock.patch.object( + type(client.transport.get_connection_info), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = service.ListUsersResponse() + call.return_value = resources.ConnectionInfo() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- client.list_users( + client.get_connection_info( parent="parent_value", ) @@ -11041,7 +11685,7 @@ def test_list_users_flattened(): assert arg == mock_val -def test_list_users_flattened_error(): +def test_get_connection_info_flattened_error(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -11049,29 +11693,31 @@ def test_list_users_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_users( - service.ListUsersRequest(), + client.get_connection_info( + service.GetConnectionInfoRequest(), parent="parent_value", ) @pytest.mark.asyncio -async def test_list_users_flattened_async(): +async def test_get_connection_info_flattened_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_users), "__call__") as call: + with mock.patch.object( + type(client.transport.get_connection_info), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = service.ListUsersResponse() + call.return_value = resources.ConnectionInfo() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - service.ListUsersResponse() + resources.ConnectionInfo() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.list_users( + response = await client.get_connection_info( parent="parent_value", ) @@ -11085,7 +11731,7 @@ async def test_list_users_flattened_async(): @pytest.mark.asyncio -async def test_list_users_flattened_error_async(): +async def test_get_connection_info_flattened_error_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) @@ -11093,249 +11739,51 @@ async def test_list_users_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.list_users( - service.ListUsersRequest(), + await client.get_connection_info( + service.GetConnectionInfoRequest(), parent="parent_value", ) -def test_list_users_pager(transport_name: str = "grpc"): +@pytest.mark.parametrize( + "request_type", + [ + service.ListUsersRequest, + dict, + ], +) +def test_list_users(request_type, transport: str = "grpc"): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, + transport=transport, ) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_users), "__call__") as call: - # Set the response to a series of pages. 
- call.side_effect = ( - service.ListUsersResponse( - users=[ - resources.User(), - resources.User(), - resources.User(), - ], - next_page_token="abc", - ), - service.ListUsersResponse( - users=[], - next_page_token="def", - ), - service.ListUsersResponse( - users=[ - resources.User(), - ], - next_page_token="ghi", - ), - service.ListUsersResponse( - users=[ - resources.User(), - resources.User(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), - ) - pager = client.list_users(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, resources.User) for i in results) - - -def test_list_users_pages(transport_name: str = "grpc"): - client = AlloyDBAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_users), "__call__") as call: - # Set the response to a series of pages. 
- call.side_effect = ( - service.ListUsersResponse( - users=[ - resources.User(), - resources.User(), - resources.User(), - ], - next_page_token="abc", - ), - service.ListUsersResponse( - users=[], - next_page_token="def", - ), - service.ListUsersResponse( - users=[ - resources.User(), - ], - next_page_token="ghi", - ), - service.ListUsersResponse( - users=[ - resources.User(), - resources.User(), - ], - ), - RuntimeError, - ) - pages = list(client.list_users(request={}).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.asyncio -async def test_list_users_async_pager(): - client = AlloyDBAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_users), "__call__", new_callable=mock.AsyncMock - ) as call: - # Set the response to a series of pages. - call.side_effect = ( - service.ListUsersResponse( - users=[ - resources.User(), - resources.User(), - resources.User(), - ], - next_page_token="abc", - ), - service.ListUsersResponse( - users=[], - next_page_token="def", - ), - service.ListUsersResponse( - users=[ - resources.User(), - ], - next_page_token="ghi", - ), - service.ListUsersResponse( - users=[ - resources.User(), - resources.User(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_users( - request={}, - ) - assert async_pager.next_page_token == "abc" - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, resources.User) for i in responses) - - -@pytest.mark.asyncio -async def test_list_users_async_pages(): - client = AlloyDBAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_users), "__call__", new_callable=mock.AsyncMock - ) as call: - # Set the response to a series of pages. - call.side_effect = ( - service.ListUsersResponse( - users=[ - resources.User(), - resources.User(), - resources.User(), - ], - next_page_token="abc", - ), - service.ListUsersResponse( - users=[], - next_page_token="def", - ), - service.ListUsersResponse( - users=[ - resources.User(), - ], - next_page_token="ghi", - ), - service.ListUsersResponse( - users=[ - resources.User(), - resources.User(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_users(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize( - "request_type", - [ - service.GetUserRequest, - dict, - ], -) -def test_get_user(request_type, transport: str = "grpc"): - client = AlloyDBAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_user), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = resources.User( - name="name_value", - password="password_value", - database_roles=["database_roles_value"], - user_type=resources.User.UserType.ALLOYDB_BUILT_IN, + call.return_value = service.ListUsersResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) - response = client.get_user(request) + response = client.list_users(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = service.GetUserRequest() + request = service.ListUsersRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, resources.User) - assert response.name == "name_value" - assert response.password == "password_value" - assert response.database_roles == ["database_roles_value"] - assert response.user_type == resources.User.UserType.ALLOYDB_BUILT_IN + assert isinstance(response, pagers.ListUsersPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] -def test_get_user_non_empty_request_with_auto_populated_field(): +def test_list_users_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AlloyDBAdminClient( @@ -11346,24 +11794,30 @@ def test_get_user_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = service.GetUserRequest( - name="name_value", + request = service.ListUsersRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_user), "__call__") as call: + with mock.patch.object(type(client.transport.list_users), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.get_user(request=request) + client.list_users(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.GetUserRequest( - name="name_value", + assert args[0] == service.ListUsersRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", ) -def test_get_user_use_cached_wrapped_rpc(): +def test_list_users_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -11377,21 +11831,21 @@ def test_get_user_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_user in client._transport._wrapped_methods + assert client._transport.list_users in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_user] = mock_rpc + client._transport._wrapped_methods[client._transport.list_users] = mock_rpc request = {} - client.get_user(request) + client.list_users(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_user(request) + client.list_users(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -11399,7 +11853,7 @@ def test_get_user_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_get_user_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_list_users_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -11414,7 +11868,7 @@ async def test_get_user_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn # Ensure method has been cached assert ( - client._client._transport.get_user + client._client._transport.list_users in client._client._transport._wrapped_methods ) @@ -11422,16 +11876,16 @@ async def test_get_user_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.get_user + client._client._transport.list_users ] = mock_rpc request = {} - await client.get_user(request) + await client.list_users(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - await client.get_user(request) + await client.list_users(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -11439,8 +11893,8 @@ async def test_get_user_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn @pytest.mark.asyncio -async def test_get_user_async( - transport: str = "grpc_asyncio", request_type=service.GetUserRequest +async def test_list_users_async( + transport: str = "grpc_asyncio", request_type=service.ListUsersRequest ): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), @@ -11452,52 +11906,48 @@ async def test_get_user_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_user), "__call__") as call: + with mock.patch.object(type(client.transport.list_users), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.User( - name="name_value", - password="password_value", - database_roles=["database_roles_value"], - user_type=resources.User.UserType.ALLOYDB_BUILT_IN, - ) - ) - response = await client.get_user(request) + service.ListUsersResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_users(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = service.GetUserRequest() + request = service.ListUsersRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.User) - assert response.name == "name_value" - assert response.password == "password_value" - assert response.database_roles == ["database_roles_value"] - assert response.user_type == resources.User.UserType.ALLOYDB_BUILT_IN + assert isinstance(response, pagers.ListUsersAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] @pytest.mark.asyncio -async def test_get_user_async_from_dict(): - await test_get_user_async(request_type=dict) +async def test_list_users_async_from_dict(): + await test_list_users_async(request_type=dict) -def test_get_user_field_headers(): +def test_list_users_field_headers(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.GetUserRequest() + request = service.ListUsersRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_user), "__call__") as call: - call.return_value = resources.User() - client.get_user(request) + with mock.patch.object(type(client.transport.list_users), "__call__") as call: + call.return_value = service.ListUsersResponse() + client.list_users(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -11508,26 +11958,28 @@ def test_get_user_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_get_user_field_headers_async(): +async def test_list_users_field_headers_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. 
Set these to a non-empty value. - request = service.GetUserRequest() + request = service.ListUsersRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_user), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.User()) - await client.get_user(request) + with mock.patch.object(type(client.transport.list_users), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListUsersResponse() + ) + await client.list_users(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -11538,35 +11990,35 @@ async def test_get_user_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] -def test_get_user_flattened(): +def test_list_users_flattened(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_user), "__call__") as call: + with mock.patch.object(type(client.transport.list_users), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = resources.User() + call.return_value = service.ListUsersResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_user( - name="name_value", + client.list_users( + parent="parent_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" assert arg == mock_val -def test_get_user_flattened_error(): +def test_list_users_flattened_error(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -11574,41 +12026,43 @@ def test_get_user_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_user( - service.GetUserRequest(), - name="name_value", + client.list_users( + service.ListUsersRequest(), + parent="parent_value", ) @pytest.mark.asyncio -async def test_get_user_flattened_async(): +async def test_list_users_flattened_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_user), "__call__") as call: + with mock.patch.object(type(client.transport.list_users), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = resources.User() + call.return_value = service.ListUsersResponse() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.User()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListUsersResponse() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_user( - name="name_value", + response = await client.list_users( + parent="parent_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" assert arg == mock_val @pytest.mark.asyncio -async def test_get_user_flattened_error_async(): +async def test_list_users_flattened_error_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) @@ -11616,115 +12070,307 @@ async def test_get_user_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.get_user( - service.GetUserRequest(), - name="name_value", + await client.list_users( + service.ListUsersRequest(), + parent="parent_value", ) -@pytest.mark.parametrize( - "request_type", - [ - service.CreateUserRequest, - dict, - ], -) -def test_create_user(request_type, transport: str = "grpc"): +def test_list_users_pager(transport_name: str = "grpc"): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport=transport_name, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_user), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = resources.User( - name="name_value", - password="password_value", - database_roles=["database_roles_value"], - user_type=resources.User.UserType.ALLOYDB_BUILT_IN, + with mock.patch.object(type(client.transport.list_users), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + service.ListUsersResponse( + users=[ + resources.User(), + resources.User(), + resources.User(), + ], + next_page_token="abc", + ), + service.ListUsersResponse( + users=[], + next_page_token="def", + ), + service.ListUsersResponse( + users=[ + resources.User(), + ], + next_page_token="ghi", + ), + service.ListUsersResponse( + users=[ + resources.User(), + resources.User(), + ], + ), + RuntimeError, ) - response = client.create_user(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = service.CreateUserRequest() - assert args[0] == request + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_users(request={}, retry=retry, timeout=timeout) - # Establish that the response is the type that we expect. - assert isinstance(response, resources.User) - assert response.name == "name_value" - assert response.password == "password_value" - assert response.database_roles == ["database_roles_value"] - assert response.user_type == resources.User.UserType.ALLOYDB_BUILT_IN + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.User) for i in results) -def test_create_user_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. 
+ +def test_list_users_pages(transport_name: str = "grpc"): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = service.CreateUserRequest( - parent="parent_value", - user_id="user_id_value", - request_id="request_id_value", + transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_user), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.create_user(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.CreateUserRequest( - parent="parent_value", - user_id="user_id_value", - request_id="request_id_value", - ) - - -def test_create_user_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AlloyDBAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + with mock.patch.object(type(client.transport.list_users), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + service.ListUsersResponse( + users=[ + resources.User(), + resources.User(), + resources.User(), + ], + next_page_token="abc", + ), + service.ListUsersResponse( + users=[], + next_page_token="def", + ), + service.ListUsersResponse( + users=[ + resources.User(), + ], + next_page_token="ghi", + ), + service.ListUsersResponse( + users=[ + resources.User(), + resources.User(), + ], + ), + RuntimeError, ) + pages = list(client.list_users(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - # Ensure method has been cached - assert client._transport.create_user in client._transport._wrapped_methods +@pytest.mark.asyncio +async def test_list_users_async_pager(): + client = AlloyDBAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_users), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + service.ListUsersResponse( + users=[ + resources.User(), + resources.User(), + resources.User(), + ], + next_page_token="abc", + ), + service.ListUsersResponse( + users=[], + next_page_token="def", + ), + service.ListUsersResponse( + users=[ + resources.User(), + ], + next_page_token="ghi", + ), + service.ListUsersResponse( + users=[ + resources.User(), + resources.User(), + ], + ), + RuntimeError, ) - client._transport._wrapped_methods[client._transport.create_user] = mock_rpc + async_pager = await client.list_users( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, resources.User) for i in responses) + + +@pytest.mark.asyncio +async def test_list_users_async_pages(): + client = AlloyDBAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_users), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + service.ListUsersResponse( + users=[ + resources.User(), + resources.User(), + resources.User(), + ], + next_page_token="abc", + ), + service.ListUsersResponse( + users=[], + next_page_token="def", + ), + service.ListUsersResponse( + users=[ + resources.User(), + ], + next_page_token="ghi", + ), + service.ListUsersResponse( + users=[ + resources.User(), + resources.User(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_users(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + service.GetUserRequest, + dict, + ], +) +def test_get_user(request_type, transport: str = "grpc"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_user), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = resources.User( + name="name_value", + password="password_value", + database_roles=["database_roles_value"], + user_type=resources.User.UserType.ALLOYDB_BUILT_IN, + keep_extra_roles=True, + ) + response = client.get_user(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = service.GetUserRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.User) + assert response.name == "name_value" + assert response.password == "password_value" + assert response.database_roles == ["database_roles_value"] + assert response.user_type == resources.User.UserType.ALLOYDB_BUILT_IN + assert response.keep_extra_roles is True + + +def test_get_user_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service.GetUserRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_user), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.get_user(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetUserRequest( + name="name_value", + ) + + +def test_get_user_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_user in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_user] = mock_rpc request = {} - client.create_user(request) + client.get_user(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.create_user(request) + client.get_user(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -11732,9 +12378,7 @@ def test_create_user_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_create_user_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): +async def test_get_user_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -11749,7 +12393,7 @@ async def test_create_user_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.create_user + client._client._transport.get_user in client._client._transport._wrapped_methods ) @@ -11757,16 +12401,16 @@ async def test_create_user_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.create_user + client._client._transport.get_user ] = mock_rpc request = {} - await client.create_user(request) + await client.get_user(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - await client.create_user(request) + await client.get_user(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -11774,8 +12418,8 @@ async def test_create_user_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_create_user_async( - transport: str = "grpc_asyncio", request_type=service.CreateUserRequest +async def test_get_user_async( + transport: str = "grpc_asyncio", request_type=service.GetUserRequest ): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), @@ -11787,7 +12431,7 @@ async def test_create_user_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_user), "__call__") as call: + with mock.patch.object(type(client.transport.get_user), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( resources.User( @@ -11795,14 +12439,15 @@ async def test_create_user_async( password="password_value", database_roles=["database_roles_value"], user_type=resources.User.UserType.ALLOYDB_BUILT_IN, + keep_extra_roles=True, ) ) - response = await client.create_user(request) + response = await client.get_user(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = service.CreateUserRequest() + request = service.GetUserRequest() assert args[0] == request # Establish that the response is the type that we expect. 
@@ -11811,28 +12456,29 @@ async def test_create_user_async( assert response.password == "password_value" assert response.database_roles == ["database_roles_value"] assert response.user_type == resources.User.UserType.ALLOYDB_BUILT_IN + assert response.keep_extra_roles is True @pytest.mark.asyncio -async def test_create_user_async_from_dict(): - await test_create_user_async(request_type=dict) +async def test_get_user_async_from_dict(): + await test_get_user_async(request_type=dict) -def test_create_user_field_headers(): +def test_get_user_field_headers(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.CreateUserRequest() + request = service.GetUserRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_user), "__call__") as call: + with mock.patch.object(type(client.transport.get_user), "__call__") as call: call.return_value = resources.User() - client.create_user(request) + client.get_user(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -11843,26 +12489,26 @@ def test_create_user_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_create_user_field_headers_async(): +async def test_get_user_field_headers_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = service.CreateUserRequest() + request = service.GetUserRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_user), "__call__") as call: + with mock.patch.object(type(client.transport.get_user), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.User()) - await client.create_user(request) + await client.get_user(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -11873,43 +12519,35 @@ async def test_create_user_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] -def test_create_user_flattened(): +def test_get_user_flattened(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_user), "__call__") as call: + with mock.patch.object(type(client.transport.get_user), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = resources.User() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.create_user( - parent="parent_value", - user=resources.User(name="name_value"), - user_id="user_id_value", + client.get_user( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].user - mock_val = resources.User(name="name_value") - assert arg == mock_val - arg = args[0].user_id - mock_val = "user_id_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val -def test_create_user_flattened_error(): +def test_get_user_flattened_error(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -11917,51 +12555,41 @@ def test_create_user_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_user( - service.CreateUserRequest(), - parent="parent_value", - user=resources.User(name="name_value"), - user_id="user_id_value", + client.get_user( + service.GetUserRequest(), + name="name_value", ) @pytest.mark.asyncio -async def test_create_user_flattened_async(): +async def test_get_user_flattened_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_user), "__call__") as call: + with mock.patch.object(type(client.transport.get_user), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = resources.User() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.User()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.create_user( - parent="parent_value", - user=resources.User(name="name_value"), - user_id="user_id_value", + response = await client.get_user( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].user - mock_val = resources.User(name="name_value") - assert arg == mock_val - arg = args[0].user_id - mock_val = "user_id_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio -async def test_create_user_flattened_error_async(): +async def test_get_user_flattened_error_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) @@ -11969,22 +12597,20 @@ async def test_create_user_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.create_user( - service.CreateUserRequest(), - parent="parent_value", - user=resources.User(name="name_value"), - user_id="user_id_value", + await client.get_user( + service.GetUserRequest(), + name="name_value", ) @pytest.mark.parametrize( "request_type", [ - service.UpdateUserRequest, + service.CreateUserRequest, dict, ], ) -def test_update_user(request_type, transport: str = "grpc"): +def test_create_user(request_type, transport: str = "grpc"): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -11995,20 +12621,21 @@ def test_update_user(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_user), "__call__") as call: + with mock.patch.object(type(client.transport.create_user), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = resources.User( name="name_value", password="password_value", database_roles=["database_roles_value"], user_type=resources.User.UserType.ALLOYDB_BUILT_IN, + keep_extra_roles=True, ) - response = client.update_user(request) + response = client.create_user(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = service.UpdateUserRequest() + request = service.CreateUserRequest() assert args[0] == request # Establish that the response is the type that we expect. @@ -12017,9 +12644,10 @@ def test_update_user(request_type, transport: str = "grpc"): assert response.password == "password_value" assert response.database_roles == ["database_roles_value"] assert response.user_type == resources.User.UserType.ALLOYDB_BUILT_IN + assert response.keep_extra_roles is True -def test_update_user_non_empty_request_with_auto_populated_field(): +def test_create_user_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AlloyDBAdminClient( @@ -12030,24 +12658,28 @@ def test_update_user_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = service.UpdateUserRequest( + request = service.CreateUserRequest( + parent="parent_value", + user_id="user_id_value", request_id="request_id_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_user), "__call__") as call: + with mock.patch.object(type(client.transport.create_user), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.update_user(request=request) + client.create_user(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.UpdateUserRequest( + assert args[0] == service.CreateUserRequest( + parent="parent_value", + user_id="user_id_value", request_id="request_id_value", ) -def test_update_user_use_cached_wrapped_rpc(): +def test_create_user_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -12061,21 +12693,21 @@ def test_update_user_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.update_user in client._transport._wrapped_methods + assert client._transport.create_user in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.update_user] = mock_rpc + client._transport._wrapped_methods[client._transport.create_user] = mock_rpc request = {} - client.update_user(request) + client.create_user(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.update_user(request) + client.create_user(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -12083,7 +12715,7 @@ def test_update_user_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_update_user_async_use_cached_wrapped_rpc( +async def test_create_user_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -12100,7 +12732,7 @@ async def test_update_user_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.update_user + client._client._transport.create_user in client._client._transport._wrapped_methods ) @@ -12108,16 +12740,16 @@ async def test_update_user_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.update_user + client._client._transport.create_user ] = mock_rpc request = {} - await client.update_user(request) + await client.create_user(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - await client.update_user(request) + await client.create_user(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -12125,8 +12757,8 @@ async def test_update_user_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_update_user_async( - transport: str = "grpc_asyncio", request_type=service.UpdateUserRequest +async def test_create_user_async( + transport: str = "grpc_asyncio", request_type=service.CreateUserRequest ): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), @@ -12138,7 +12770,7 @@ async def test_update_user_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.update_user), "__call__") as call: + with mock.patch.object(type(client.transport.create_user), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( resources.User( @@ -12146,14 +12778,15 @@ async def test_update_user_async( password="password_value", database_roles=["database_roles_value"], user_type=resources.User.UserType.ALLOYDB_BUILT_IN, + keep_extra_roles=True, ) ) - response = await client.update_user(request) + response = await client.create_user(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = service.UpdateUserRequest() + request = service.CreateUserRequest() assert args[0] == request # Establish that the response is the type that we expect. @@ -12162,28 +12795,29 @@ async def test_update_user_async( assert response.password == "password_value" assert response.database_roles == ["database_roles_value"] assert response.user_type == resources.User.UserType.ALLOYDB_BUILT_IN + assert response.keep_extra_roles is True @pytest.mark.asyncio -async def test_update_user_async_from_dict(): - await test_update_user_async(request_type=dict) +async def test_create_user_async_from_dict(): + await test_create_user_async(request_type=dict) -def test_update_user_field_headers(): +def test_create_user_field_headers(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.UpdateUserRequest() + request = service.CreateUserRequest() - request.user.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.update_user), "__call__") as call: + with mock.patch.object(type(client.transport.create_user), "__call__") as call: call.return_value = resources.User() - client.update_user(request) + client.create_user(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -12194,26 +12828,26 @@ def test_update_user_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "user.name=name_value", + "parent=parent_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_update_user_field_headers_async(): +async def test_create_user_field_headers_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.UpdateUserRequest() + request = service.CreateUserRequest() - request.user.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_user), "__call__") as call: + with mock.patch.object(type(client.transport.create_user), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.User()) - await client.update_user(request) + await client.create_user(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -12224,39 +12858,43 @@ async def test_update_user_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "user.name=name_value", + "parent=parent_value", ) in kw["metadata"] -def test_update_user_flattened(): +def test_create_user_flattened(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.update_user), "__call__") as call: + with mock.patch.object(type(client.transport.create_user), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = resources.User() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.update_user( + client.create_user( + parent="parent_value", user=resources.User(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + user_id="user_id_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val arg = args[0].user mock_val = resources.User(name="name_value") assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + arg = args[0].user_id + mock_val = "user_id_value" assert arg == mock_val -def test_update_user_flattened_error(): +def test_create_user_flattened_error(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -12264,46 +12902,51 @@ def test_update_user_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_user( - service.UpdateUserRequest(), + client.create_user( + service.CreateUserRequest(), + parent="parent_value", user=resources.User(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + user_id="user_id_value", ) @pytest.mark.asyncio -async def test_update_user_flattened_async(): +async def test_create_user_flattened_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.update_user), "__call__") as call: + with mock.patch.object(type(client.transport.create_user), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = resources.User() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.User()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.update_user( + response = await client.create_user( + parent="parent_value", user=resources.User(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + user_id="user_id_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val arg = args[0].user mock_val = resources.User(name="name_value") assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + arg = args[0].user_id + mock_val = "user_id_value" assert arg == mock_val @pytest.mark.asyncio -async def test_update_user_flattened_error_async(): +async def test_create_user_flattened_error_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) @@ -12311,21 +12954,22 @@ async def test_update_user_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.update_user( - service.UpdateUserRequest(), + await client.create_user( + service.CreateUserRequest(), + parent="parent_value", user=resources.User(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + user_id="user_id_value", ) @pytest.mark.parametrize( "request_type", [ - service.DeleteUserRequest, + service.UpdateUserRequest, dict, ], ) -def test_delete_user(request_type, transport: str = "grpc"): +def test_update_user(request_type, transport: str = "grpc"): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -12336,22 +12980,33 @@ def test_delete_user(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_user), "__call__") as call: + with mock.patch.object(type(client.transport.update_user), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_user(request) + call.return_value = resources.User( + name="name_value", + password="password_value", + database_roles=["database_roles_value"], + user_type=resources.User.UserType.ALLOYDB_BUILT_IN, + keep_extra_roles=True, + ) + response = client.update_user(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = service.DeleteUserRequest() + request = service.UpdateUserRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert response is None + assert isinstance(response, resources.User) + assert response.name == "name_value" + assert response.password == "password_value" + assert response.database_roles == ["database_roles_value"] + assert response.user_type == resources.User.UserType.ALLOYDB_BUILT_IN + assert response.keep_extra_roles is True -def test_delete_user_non_empty_request_with_auto_populated_field(): +def test_update_user_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AlloyDBAdminClient( @@ -12362,26 +13017,24 @@ def test_delete_user_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = service.DeleteUserRequest( - name="name_value", + request = service.UpdateUserRequest( request_id="request_id_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_user), "__call__") as call: + with mock.patch.object(type(client.transport.update_user), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.delete_user(request=request) + client.update_user(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.DeleteUserRequest( - name="name_value", + assert args[0] == service.UpdateUserRequest( request_id="request_id_value", ) -def test_delete_user_use_cached_wrapped_rpc(): +def test_update_user_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -12395,21 +13048,21 @@ def test_delete_user_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.delete_user in client._transport._wrapped_methods + assert client._transport.update_user in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.delete_user] = mock_rpc + client._transport._wrapped_methods[client._transport.update_user] = mock_rpc request = {} - client.delete_user(request) + client.update_user(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.delete_user(request) + client.update_user(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -12417,7 +13070,7 @@ def test_delete_user_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_delete_user_async_use_cached_wrapped_rpc( +async def test_update_user_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -12434,7 +13087,7 @@ async def test_delete_user_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.delete_user + client._client._transport.update_user in client._client._transport._wrapped_methods ) @@ -12442,16 +13095,16 @@ async def test_delete_user_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.delete_user + client._client._transport.update_user ] = mock_rpc request = {} - await client.delete_user(request) + await client.update_user(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - await client.delete_user(request) + await client.update_user(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -12459,8 +13112,8 @@ async def test_delete_user_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_delete_user_async( - transport: str = "grpc_asyncio", request_type=service.DeleteUserRequest +async def test_update_user_async( + transport: str = "grpc_asyncio", request_type=service.UpdateUserRequest ): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), @@ -12472,41 +13125,54 @@ async def test_delete_user_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.delete_user), "__call__") as call: + with mock.patch.object(type(client.transport.update_user), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_user(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.User( + name="name_value", + password="password_value", + database_roles=["database_roles_value"], + user_type=resources.User.UserType.ALLOYDB_BUILT_IN, + keep_extra_roles=True, + ) + ) + response = await client.update_user(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = service.DeleteUserRequest() + request = service.UpdateUserRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert response is None + assert isinstance(response, resources.User) + assert response.name == "name_value" + assert response.password == "password_value" + assert response.database_roles == ["database_roles_value"] + assert response.user_type == resources.User.UserType.ALLOYDB_BUILT_IN + assert response.keep_extra_roles is True @pytest.mark.asyncio -async def test_delete_user_async_from_dict(): - await test_delete_user_async(request_type=dict) +async def test_update_user_async_from_dict(): + await test_update_user_async(request_type=dict) -def test_delete_user_field_headers(): +def test_update_user_field_headers(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.DeleteUserRequest() + request = service.UpdateUserRequest() - request.name = "name_value" + request.user.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.delete_user), "__call__") as call: - call.return_value = None - client.delete_user(request) + with mock.patch.object(type(client.transport.update_user), "__call__") as call: + call.return_value = resources.User() + client.update_user(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -12517,26 +13183,26 @@ def test_delete_user_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "user.name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_delete_user_field_headers_async(): +async def test_update_user_field_headers_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.DeleteUserRequest() + request = service.UpdateUserRequest() - request.name = "name_value" + request.user.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_user), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_user(request) + with mock.patch.object(type(client.transport.update_user), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.User()) + await client.update_user(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) @@ -12547,35 +13213,39 @@ async def test_delete_user_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "user.name=name_value", ) in kw["metadata"] -def test_delete_user_flattened(): +def test_update_user_flattened(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_user), "__call__") as call: + with mock.patch.object(type(client.transport.update_user), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = None + call.return_value = resources.User() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.delete_user( - name="name_value", + client.update_user( + user=resources.User(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].user + mock_val = resources.User(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val -def test_delete_user_flattened_error(): +def test_update_user_flattened_error(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -12583,41 +13253,46 @@ def test_delete_user_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.delete_user( - service.DeleteUserRequest(), - name="name_value", + client.update_user( + service.UpdateUserRequest(), + user=resources.User(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio -async def test_delete_user_flattened_async(): +async def test_update_user_flattened_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_user), "__call__") as call: + with mock.patch.object(type(client.transport.update_user), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = None + call.return_value = resources.User() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.User()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.delete_user( - name="name_value", + response = await client.update_user( + user=resources.User(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].user + mock_val = resources.User(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val @pytest.mark.asyncio -async def test_delete_user_flattened_error_async(): +async def test_update_user_flattened_error_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) @@ -12625,20 +13300,21 @@ async def test_delete_user_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.delete_user( - service.DeleteUserRequest(), - name="name_value", + await client.update_user( + service.UpdateUserRequest(), + user=resources.User(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.parametrize( "request_type", [ - service.ListDatabasesRequest, + service.DeleteUserRequest, dict, ], ) -def test_list_databases(request_type, transport: str = "grpc"): +def test_delete_user(request_type, transport: str = "grpc"): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -12649,25 +13325,22 @@ def test_list_databases(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_databases), "__call__") as call: + with mock.patch.object(type(client.transport.delete_user), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = service.ListDatabasesResponse( - next_page_token="next_page_token_value", - ) - response = client.list_databases(request) + call.return_value = None + response = client.delete_user(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = service.ListDatabasesRequest() + request = service.DeleteUserRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListDatabasesPager) - assert response.next_page_token == "next_page_token_value" + assert response is None -def test_list_databases_non_empty_request_with_auto_populated_field(): +def test_delete_user_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AlloyDBAdminClient( @@ -12678,28 +13351,26 @@ def test_list_databases_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = service.ListDatabasesRequest( - parent="parent_value", - page_token="page_token_value", - filter="filter_value", + request = service.DeleteUserRequest( + name="name_value", + request_id="request_id_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_databases), "__call__") as call: + with mock.patch.object(type(client.transport.delete_user), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.list_databases(request=request) + client.delete_user(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == service.ListDatabasesRequest( - parent="parent_value", - page_token="page_token_value", - filter="filter_value", + assert args[0] == service.DeleteUserRequest( + name="name_value", + request_id="request_id_value", ) -def test_list_databases_use_cached_wrapped_rpc(): +def test_delete_user_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -12713,21 +13384,21 @@ def test_list_databases_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_databases in client._transport._wrapped_methods + assert client._transport.delete_user in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.list_databases] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_user] = mock_rpc request = {} - client.list_databases(request) + client.delete_user(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_databases(request) + client.delete_user(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -12735,7 +13406,7 @@ def test_list_databases_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_list_databases_async_use_cached_wrapped_rpc( +async def test_delete_user_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -12752,7 +13423,7 @@ async def test_list_databases_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.list_databases + client._client._transport.delete_user in client._client._transport._wrapped_methods ) @@ -12760,16 +13431,16 @@ async def test_list_databases_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.list_databases + client._client._transport.delete_user ] = mock_rpc request = {} - await client.list_databases(request) + await client.delete_user(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - await client.list_databases(request) + await client.delete_user(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -12777,8 +13448,8 @@ async def test_list_databases_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_list_databases_async( - transport: str = "grpc_asyncio", request_type=service.ListDatabasesRequest +async def test_delete_user_async( + transport: str = "grpc_asyncio", request_type=service.DeleteUserRequest ): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), @@ -12790,46 +13461,41 @@ async def test_list_databases_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_databases), "__call__") as call: + with mock.patch.object(type(client.transport.delete_user), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - service.ListDatabasesResponse( - next_page_token="next_page_token_value", - ) - ) - response = await client.list_databases(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_user(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = service.ListDatabasesRequest() + request = service.DeleteUserRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListDatabasesAsyncPager) - assert response.next_page_token == "next_page_token_value" + assert response is None @pytest.mark.asyncio -async def test_list_databases_async_from_dict(): - await test_list_databases_async(request_type=dict) +async def test_delete_user_async_from_dict(): + await test_delete_user_async(request_type=dict) -def test_list_databases_field_headers(): +def test_delete_user_field_headers(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.ListDatabasesRequest() + request = service.DeleteUserRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_databases), "__call__") as call: - call.return_value = service.ListDatabasesResponse() - client.list_databases(request) + with mock.patch.object(type(client.transport.delete_user), "__call__") as call: + call.return_value = None + client.delete_user(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -12840,28 +13506,26 @@ def test_list_databases_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_list_databases_field_headers_async(): +async def test_delete_user_field_headers_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = service.ListDatabasesRequest() + request = service.DeleteUserRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_databases), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - service.ListDatabasesResponse() - ) - await client.list_databases(request) + with mock.patch.object(type(client.transport.delete_user), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_user(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) @@ -12872,35 +13536,35 @@ async def test_list_databases_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] -def test_list_databases_flattened(): +def test_delete_user_flattened(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_databases), "__call__") as call: + with mock.patch.object(type(client.transport.delete_user), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = service.ListDatabasesResponse() + call.return_value = None # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_databases( - parent="parent_value", + client.delete_user( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val -def test_list_databases_flattened_error(): +def test_delete_user_flattened_error(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -12908,43 +13572,41 @@ def test_list_databases_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.list_databases( - service.ListDatabasesRequest(), - parent="parent_value", + client.delete_user( + service.DeleteUserRequest(), + name="name_value", ) @pytest.mark.asyncio -async def test_list_databases_flattened_async(): +async def test_delete_user_flattened_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_databases), "__call__") as call: + with mock.patch.object(type(client.transport.delete_user), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = service.ListDatabasesResponse() + call.return_value = None - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - service.ListDatabasesResponse() - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_databases( - parent="parent_value", + response = await client.delete_user( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio -async def test_list_databases_flattened_error_async(): +async def test_delete_user_flattened_error_async(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), ) @@ -12952,181 +13614,415 @@ async def test_list_databases_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.list_databases( - service.ListDatabasesRequest(), - parent="parent_value", + await client.delete_user( + service.DeleteUserRequest(), + name="name_value", ) -def test_list_databases_pager(transport_name: str = "grpc"): +@pytest.mark.parametrize( + "request_type", + [ + service.ListDatabasesRequest, + dict, + ], +) +def test_list_databases(request_type, transport: str = "grpc"): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, + transport=transport, ) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_databases), "__call__") as call: - # Set the response to a series of pages. - call.side_effect = ( - service.ListDatabasesResponse( - databases=[ - resources.Database(), - resources.Database(), - resources.Database(), - ], - next_page_token="abc", - ), - service.ListDatabasesResponse( - databases=[], - next_page_token="def", - ), - service.ListDatabasesResponse( - databases=[ - resources.Database(), - ], - next_page_token="ghi", - ), - service.ListDatabasesResponse( - databases=[ - resources.Database(), - resources.Database(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + # Designate an appropriate return value for the call. 
+ call.return_value = service.ListDatabasesResponse( + next_page_token="next_page_token_value", ) - pager = client.list_databases(request={}, retry=retry, timeout=timeout) + response = client.list_databases(request) - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = service.ListDatabasesRequest() + assert args[0] == request - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, resources.Database) for i in results) + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDatabasesPager) + assert response.next_page_token == "next_page_token_value" -def test_list_databases_pages(transport_name: str = "grpc"): +def test_list_databases_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service.ListDatabasesRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_databases), "__call__") as call: - # Set the response to a series of pages. 
- call.side_effect = ( - service.ListDatabasesResponse( - databases=[ - resources.Database(), - resources.Database(), - resources.Database(), - ], - next_page_token="abc", - ), - service.ListDatabasesResponse( - databases=[], - next_page_token="def", - ), - service.ListDatabasesResponse( - databases=[ - resources.Database(), - ], - next_page_token="ghi", - ), - service.ListDatabasesResponse( - databases=[ - resources.Database(), - resources.Database(), - ], - ), - RuntimeError, + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_databases(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListDatabasesRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", ) - pages = list(client.list_databases(request={}).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token -@pytest.mark.asyncio -async def test_list_databases_async_pager(): - client = AlloyDBAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) +def test_list_databases_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_databases), "__call__", new_callable=mock.AsyncMock - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - service.ListDatabasesResponse( - databases=[ - resources.Database(), - resources.Database(), - resources.Database(), - ], - next_page_token="abc", - ), - service.ListDatabasesResponse( - databases=[], - next_page_token="def", - ), - service.ListDatabasesResponse( - databases=[ - resources.Database(), - ], - next_page_token="ghi", - ), - service.ListDatabasesResponse( - databases=[ - resources.Database(), - resources.Database(), - ], - ), - RuntimeError, + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_databases in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. ) - async_pager = await client.list_databases( - request={}, + client._transport._wrapped_methods[client._transport.list_databases] = mock_rpc + request = {} + client.list_databases(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_databases(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_databases_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = AlloyDBAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, ) - assert async_pager.next_page_token == "abc" - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - assert len(responses) == 6 - assert all(isinstance(i, resources.Database) for i in responses) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_databases + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.list_databases + ] = mock_rpc + + request = {} + await client.list_databases(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.list_databases(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_list_databases_async_pages(): +async def test_list_databases_async( + transport: str = "grpc_asyncio", request_type=service.ListDatabasesRequest +): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), + transport=transport, ) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_databases), "__call__", new_callable=mock.AsyncMock - ) as call: - # Set the response to a series of pages. - call.side_effect = ( - service.ListDatabasesResponse( - databases=[ - resources.Database(), - resources.Database(), - resources.Database(), - ], - next_page_token="abc", - ), - service.ListDatabasesResponse( - databases=[], - next_page_token="def", - ), + with mock.patch.object(type(client.transport.list_databases), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListDatabasesResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_databases(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = service.ListDatabasesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListDatabasesAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_databases_async_from_dict(): + await test_list_databases_async(request_type=dict) + + +def test_list_databases_field_headers(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.ListDatabasesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_databases), "__call__") as call: + call.return_value = service.ListDatabasesResponse() + client.list_databases(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_databases_field_headers_async(): + client = AlloyDBAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.ListDatabasesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_databases), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListDatabasesResponse() + ) + await client.list_databases(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_databases_flattened(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_databases), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = service.ListDatabasesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_databases( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_databases_flattened_error(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_databases( + service.ListDatabasesRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_databases_flattened_async(): + client = AlloyDBAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_databases), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = service.ListDatabasesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListDatabasesResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_databases( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_databases_flattened_error_async(): + client = AlloyDBAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_databases( + service.ListDatabasesRequest(), + parent="parent_value", + ) + + +def test_list_databases_pager(transport_name: str = "grpc"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_databases), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + service.ListDatabasesResponse( + databases=[ + resources.Database(), + resources.Database(), + resources.Database(), + ], + next_page_token="abc", + ), + service.ListDatabasesResponse( + databases=[], + next_page_token="def", + ), + service.ListDatabasesResponse( + databases=[ + resources.Database(), + ], + next_page_token="ghi", + ), + service.ListDatabasesResponse( + databases=[ + resources.Database(), + resources.Database(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_databases(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.Database) for i in results) + + +def test_list_databases_pages(transport_name: str = "grpc"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_databases), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + service.ListDatabasesResponse( + databases=[ + resources.Database(), + resources.Database(), + resources.Database(), + ], + next_page_token="abc", + ), + service.ListDatabasesResponse( + databases=[], + next_page_token="def", + ), service.ListDatabasesResponse( databases=[ resources.Database(), @@ -13141,18 +14037,541 @@ async def test_list_databases_async_pages(): ), RuntimeError, ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_databases(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token + pages = list(client.list_databases(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_databases_async_pager(): + client = AlloyDBAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_databases), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + service.ListDatabasesResponse( + databases=[ + resources.Database(), + resources.Database(), + resources.Database(), + ], + next_page_token="abc", + ), + service.ListDatabasesResponse( + databases=[], + next_page_token="def", + ), + service.ListDatabasesResponse( + databases=[ + resources.Database(), + ], + next_page_token="ghi", + ), + service.ListDatabasesResponse( + databases=[ + resources.Database(), + resources.Database(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_databases( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, resources.Database) for i in responses) + + +@pytest.mark.asyncio +async def test_list_databases_async_pages(): + client = AlloyDBAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_databases), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + service.ListDatabasesResponse( + databases=[ + resources.Database(), + resources.Database(), + resources.Database(), + ], + next_page_token="abc", + ), + service.ListDatabasesResponse( + databases=[], + next_page_token="def", + ), + service.ListDatabasesResponse( + databases=[ + resources.Database(), + ], + next_page_token="ghi", + ), + service.ListDatabasesResponse( + databases=[ + resources.Database(), + resources.Database(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_databases(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_list_clusters_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_clusters in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_clusters] = mock_rpc + + request = {} + client.list_clusters(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_clusters(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_clusters_rest_required_fields(request_type=service.ListClustersRequest): + transport_class = transports.AlloyDBAdminRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_clusters._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_clusters._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = service.ListClustersResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = service.ListClustersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_clusters(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_clusters_rest_unset_required_fields(): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_clusters._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +def test_list_clusters_rest_flattened(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = service.ListClustersResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = service.ListClustersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_clusters(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{parent=projects/*/locations/*}/clusters" + % client.transport._host, + args[1], + ) + + +def test_list_clusters_rest_flattened_error(transport: str = "rest"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_clusters( + service.ListClustersRequest(), + parent="parent_value", + ) + + +def test_list_clusters_rest_pager(transport: str = "rest"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + service.ListClustersResponse( + clusters=[ + resources.Cluster(), + resources.Cluster(), + resources.Cluster(), + ], + next_page_token="abc", + ), + service.ListClustersResponse( + clusters=[], + next_page_token="def", + ), + service.ListClustersResponse( + clusters=[ + resources.Cluster(), + ], + next_page_token="ghi", + ), + service.ListClustersResponse( + clusters=[ + resources.Cluster(), + resources.Cluster(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(service.ListClustersResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_clusters(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.Cluster) for i in results) + + pages = list(client.list_clusters(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_get_cluster_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_cluster in client._transport._wrapped_methods 
+ + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_cluster] = mock_rpc + + request = {} + client.get_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_cluster(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_cluster_rest_required_fields(request_type=service.GetClusterRequest): + transport_class = transports.AlloyDBAdminRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_cluster._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_cluster._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("view",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = resources.Cluster() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = resources.Cluster.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_cluster(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_cluster_rest_unset_required_fields(): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_cluster._get_unset_required_fields({}) + assert set(unset_fields) == (set(("view",)) & set(("name",))) + + +def test_get_cluster_rest_flattened(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = resources.Cluster() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/clusters/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.Cluster.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_cluster(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{name=projects/*/locations/*/clusters/*}" + % client.transport._host, + args[1], + ) -def test_list_clusters_rest_use_cached_wrapped_rpc(): +def test_get_cluster_rest_flattened_error(transport: str = "rest"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_cluster( + service.GetClusterRequest(), + name="name_value", + ) + + +def test_create_cluster_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -13166,33 +14585,38 @@ def test_list_clusters_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_clusters in client._transport._wrapped_methods + assert client._transport.create_cluster in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.list_clusters] = mock_rpc + client._transport._wrapped_methods[client._transport.create_cluster] = mock_rpc request = {} - client.list_clusters(request) + client.create_cluster(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_clusters(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_clusters_rest_required_fields(request_type=service.ListClustersRequest): +def test_create_cluster_rest_required_fields(request_type=service.CreateClusterRequest): transport_class = transports.AlloyDBAdminRestTransport request_init = {} request_init["parent"] = "" + request_init["cluster_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -13200,26 +14624,29 @@ def test_list_clusters_rest_required_fields(request_type=service.ListClustersReq ) # verify fields with default values are dropped + assert "clusterId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_clusters._get_unset_required_fields(jsonified_request) + ).create_cluster._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + assert "clusterId" in jsonified_request + assert jsonified_request["clusterId"] == request_init["cluster_id"] jsonified_request["parent"] = "parent_value" + jsonified_request["clusterId"] = "cluster_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_clusters._get_unset_required_fields(jsonified_request) + ).create_cluster._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
assert not set(unset_fields) - set( ( - "filter", - "order_by", - "page_size", - "page_token", + "cluster_id", + "request_id", + "validate_only", ) ) jsonified_request.update(unset_fields) @@ -13227,6 +14654,8 @@ def test_list_clusters_rest_required_fields(request_type=service.ListClustersReq # verify required fields with non-default values are left alone assert "parent" in jsonified_request assert jsonified_request["parent"] == "parent_value" + assert "clusterId" in jsonified_request + assert jsonified_request["clusterId"] == "cluster_id_value" client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -13235,7 +14664,7 @@ def test_list_clusters_rest_required_fields(request_type=service.ListClustersReq request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = service.ListClustersResponse() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -13247,48 +14676,57 @@ def test_list_clusters_rest_required_fields(request_type=service.ListClustersReq pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = service.ListClustersResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_clusters(request) + response = client.create_cluster(request) - expected_params = [("$alt", "json;enum-encoding=int")] + expected_params = [ + ( + "clusterId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_clusters_rest_unset_required_fields(): +def test_create_cluster_rest_unset_required_fields(): transport = transports.AlloyDBAdminRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_clusters._get_unset_required_fields({}) + unset_fields = transport.create_cluster._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( - "filter", - "orderBy", - "pageSize", - "pageToken", + "clusterId", + "requestId", + "validateOnly", + ) + ) + & set( + ( + "parent", + "clusterId", + "cluster", ) ) - & set(("parent",)) ) -def test_list_clusters_rest_flattened(): +def test_create_cluster_rest_flattened(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -13297,7 +14735,7 @@ def test_list_clusters_rest_flattened(): # Mock the http request call within the method and fake a 
response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = service.ListClustersResponse() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method sample_request = {"parent": "projects/sample1/locations/sample2"} @@ -13305,19 +14743,21 @@ def test_list_clusters_rest_flattened(): # get truthy value for each flattened field mock_args = dict( parent="parent_value", + cluster=resources.Cluster( + backup_source=resources.BackupSource(backup_uid="backup_uid_value") + ), + cluster_id="cluster_id_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = service.ListClustersResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_clusters(**mock_args) + client.create_cluster(**mock_args) # Establish that the underlying call was made with the expected # request object values. @@ -13330,7 +14770,7 @@ def test_list_clusters_rest_flattened(): ) -def test_list_clusters_rest_flattened_error(transport: str = "rest"): +def test_create_cluster_rest_flattened_error(transport: str = "rest"): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -13339,74 +14779,17 @@ def test_list_clusters_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.list_clusters( - service.ListClustersRequest(), + client.create_cluster( + service.CreateClusterRequest(), parent="parent_value", - ) - - -def test_list_clusters_rest_pager(transport: str = "rest"): - client = AlloyDBAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - service.ListClustersResponse( - clusters=[ - resources.Cluster(), - resources.Cluster(), - resources.Cluster(), - ], - next_page_token="abc", - ), - service.ListClustersResponse( - clusters=[], - next_page_token="def", - ), - service.ListClustersResponse( - clusters=[ - resources.Cluster(), - ], - next_page_token="ghi", - ), - service.ListClustersResponse( - clusters=[ - resources.Cluster(), - resources.Cluster(), - ], + cluster=resources.Cluster( + backup_source=resources.BackupSource(backup_uid="backup_uid_value") ), + cluster_id="cluster_id_value", ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(service.ListClustersResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "projects/sample1/locations/sample2"} - - pager = client.list_clusters(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, resources.Cluster) for i in results) - - pages = list(client.list_clusters(request=sample_request).pages) - for page_, token in 
zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token -def test_get_cluster_rest_use_cached_wrapped_rpc(): +def test_update_cluster_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -13420,33 +14803,36 @@ def test_get_cluster_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_cluster in client._transport._wrapped_methods + assert client._transport.update_cluster in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_cluster] = mock_rpc + client._transport._wrapped_methods[client._transport.update_cluster] = mock_rpc request = {} - client.get_cluster(request) + client.update_cluster(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_cluster(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_cluster_rest_required_fields(request_type=service.GetClusterRequest): +def test_update_cluster_rest_required_fields(request_type=service.UpdateClusterRequest): transport_class = transports.AlloyDBAdminRestTransport request_init = {} - request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -13457,23 +14843,26 @@ def test_get_cluster_rest_required_fields(request_type=service.GetClusterRequest unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_cluster._get_unset_required_fields(jsonified_request) + ).update_cluster._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" - unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_cluster._get_unset_required_fields(jsonified_request) + ).update_cluster._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("view",)) + assert not set(unset_fields) - set( + ( + "allow_missing", + "request_id", + "update_mask", + "validate_only", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -13482,7 +14871,7 @@ def test_get_cluster_rest_required_fields(request_type=service.GetClusterRequest request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = resources.Cluster() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -13494,38 +14883,46 @@ def test_get_cluster_rest_required_fields(request_type=service.GetClusterRequest pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "patch", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = resources.Cluster.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_cluster(request) + response = client.update_cluster(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_cluster_rest_unset_required_fields(): +def test_update_cluster_rest_unset_required_fields(): transport = transports.AlloyDBAdminRestTransport( 
credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_cluster._get_unset_required_fields({}) - assert set(unset_fields) == (set(("view",)) & set(("name",))) + unset_fields = transport.update_cluster._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "allowMissing", + "requestId", + "updateMask", + "validateOnly", + ) + ) + & set(("cluster",)) + ) -def test_get_cluster_rest_flattened(): +def test_update_cluster_rest_flattened(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -13534,40 +14931,43 @@ def test_get_cluster_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.Cluster() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {"name": "projects/sample1/locations/sample2/clusters/sample3"} + sample_request = { + "cluster": {"name": "projects/sample1/locations/sample2/clusters/sample3"} + } # get truthy value for each flattened field mock_args = dict( - name="name_value", + cluster=resources.Cluster( + backup_source=resources.BackupSource(backup_uid="backup_uid_value") + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = resources.Cluster.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_cluster(**mock_args) + client.update_cluster(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1beta/{name=projects/*/locations/*/clusters/*}" + "%s/v1beta/{cluster.name=projects/*/locations/*/clusters/*}" % client.transport._host, args[1], ) -def test_get_cluster_rest_flattened_error(transport: str = "rest"): +def test_update_cluster_rest_flattened_error(transport: str = "rest"): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -13576,13 +14976,16 @@ def test_get_cluster_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_cluster( - service.GetClusterRequest(), - name="name_value", + client.update_cluster( + service.UpdateClusterRequest(), + cluster=resources.Cluster( + backup_source=resources.BackupSource(backup_uid="backup_uid_value") + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_create_cluster_rest_use_cached_wrapped_rpc(): +def test_upgrade_cluster_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -13596,17 +14999,17 @@ def test_create_cluster_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.create_cluster in client._transport._wrapped_methods + assert client._transport.upgrade_cluster in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.create_cluster] = mock_rpc + client._transport._wrapped_methods[client._transport.upgrade_cluster] = mock_rpc request = {} - client.create_cluster(request) + client.upgrade_cluster(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 @@ -13615,19 +15018,20 @@ def test_create_cluster_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.create_cluster(request) + client.upgrade_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_cluster_rest_required_fields(request_type=service.CreateClusterRequest): +def test_upgrade_cluster_rest_required_fields( + request_type=service.UpgradeClusterRequest, +): transport_class = transports.AlloyDBAdminRestTransport request_init = {} - request_init["parent"] = "" - request_init["cluster_id"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -13635,38 +15039,24 @@ def test_create_cluster_rest_required_fields(request_type=service.CreateClusterR ) # verify fields with default values are dropped - assert "clusterId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_cluster._get_unset_required_fields(jsonified_request) + ).upgrade_cluster._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - assert "clusterId" in jsonified_request - assert jsonified_request["clusterId"] == request_init["cluster_id"] - jsonified_request["parent"] = "parent_value" - jsonified_request["clusterId"] = "cluster_id_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( 
credentials=ga_credentials.AnonymousCredentials() - ).create_cluster._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "cluster_id", - "request_id", - "validate_only", - ) - ) + ).upgrade_cluster._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - assert "clusterId" in jsonified_request - assert jsonified_request["clusterId"] == "cluster_id_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -13687,7 +15077,7 @@ def test_create_cluster_rest_required_fields(request_type=service.CreateClusterR pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "patch", "query_params": pb_request, } transcode_result["body"] = pb_request @@ -13700,44 +15090,31 @@ def test_create_cluster_rest_required_fields(request_type=service.CreateClusterR response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_cluster(request) + response = client.upgrade_cluster(request) - expected_params = [ - ( - "clusterId", - "", - ), - ("$alt", "json;enum-encoding=int"), - ] + expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_cluster_rest_unset_required_fields(): +def test_upgrade_cluster_rest_unset_required_fields(): transport = transports.AlloyDBAdminRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_cluster._get_unset_required_fields({}) + unset_fields = 
transport.upgrade_cluster._get_unset_required_fields({}) assert set(unset_fields) == ( - set( - ( - "clusterId", - "requestId", - "validateOnly", - ) - ) + set(()) & set( ( - "parent", - "clusterId", - "cluster", + "name", + "version", ) ) ) -def test_create_cluster_rest_flattened(): +def test_upgrade_cluster_rest_flattened(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -13749,15 +15126,12 @@ def test_create_cluster_rest_flattened(): return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = {"name": "projects/sample1/locations/sample2/clusters/sample3"} # get truthy value for each flattened field mock_args = dict( - parent="parent_value", - cluster=resources.Cluster( - backup_source=resources.BackupSource(backup_uid="backup_uid_value") - ), - cluster_id="cluster_id_value", + name="name_value", + version=resources.DatabaseVersion.POSTGRES_13, ) mock_args.update(sample_request) @@ -13768,20 +15142,20 @@ def test_create_cluster_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.create_cluster(**mock_args) + client.upgrade_cluster(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1beta/{parent=projects/*/locations/*}/clusters" + "%s/v1beta/{name=projects/*/locations/*/clusters/*}:upgrade" % client.transport._host, args[1], ) -def test_create_cluster_rest_flattened_error(transport: str = "rest"): +def test_upgrade_cluster_rest_flattened_error(transport: str = "rest"): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -13790,17 +15164,14 @@ def test_create_cluster_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_cluster( - service.CreateClusterRequest(), - parent="parent_value", - cluster=resources.Cluster( - backup_source=resources.BackupSource(backup_uid="backup_uid_value") - ), - cluster_id="cluster_id_value", + client.upgrade_cluster( + service.UpgradeClusterRequest(), + name="name_value", + version=resources.DatabaseVersion.POSTGRES_13, ) -def test_update_cluster_rest_use_cached_wrapped_rpc(): +def test_delete_cluster_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -13814,17 +15185,17 @@ def test_update_cluster_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.update_cluster in client._transport._wrapped_methods + assert client._transport.delete_cluster in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.update_cluster] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_cluster] = mock_rpc request = {} - client.update_cluster(request) + client.delete_cluster(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 @@ -13833,17 +15204,18 @@ def test_update_cluster_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.update_cluster(request) + client.delete_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_update_cluster_rest_required_fields(request_type=service.UpdateClusterRequest): +def test_delete_cluster_rest_required_fields(request_type=service.DeleteClusterRequest): transport_class = transports.AlloyDBAdminRestTransport request_init = {} + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -13854,26 +15226,30 @@ def test_update_cluster_rest_required_fields(request_type=service.UpdateClusterR unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_cluster._get_unset_required_fields(jsonified_request) + ).delete_cluster._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + jsonified_request["name"] = "name_value" + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_cluster._get_unset_required_fields(jsonified_request) + ).delete_cluster._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
assert not set(unset_fields) - set( ( - "allow_missing", + "etag", + "force", "request_id", - "update_mask", "validate_only", ) ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -13894,10 +15270,9 @@ def test_update_cluster_rest_required_fields(request_type=service.UpdateClusterR pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "delete", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -13907,33 +15282,33 @@ def test_update_cluster_rest_required_fields(request_type=service.UpdateClusterR response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_cluster(request) + response = client.delete_cluster(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_cluster_rest_unset_required_fields(): +def test_delete_cluster_rest_unset_required_fields(): transport = transports.AlloyDBAdminRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.update_cluster._get_unset_required_fields({}) + unset_fields = transport.delete_cluster._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( - "allowMissing", + "etag", + "force", "requestId", - "updateMask", "validateOnly", ) ) - & set(("cluster",)) + & set(("name",)) ) -def test_update_cluster_rest_flattened(): +def test_delete_cluster_rest_flattened(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -13945,16 +15320,11 @@ def test_update_cluster_rest_flattened(): return_value = 
operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = { - "cluster": {"name": "projects/sample1/locations/sample2/clusters/sample3"} - } + sample_request = {"name": "projects/sample1/locations/sample2/clusters/sample3"} # get truthy value for each flattened field mock_args = dict( - cluster=resources.Cluster( - backup_source=resources.BackupSource(backup_uid="backup_uid_value") - ), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + name="name_value", ) mock_args.update(sample_request) @@ -13965,20 +15335,20 @@ def test_update_cluster_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.update_cluster(**mock_args) + client.delete_cluster(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1beta/{cluster.name=projects/*/locations/*/clusters/*}" + "%s/v1beta/{name=projects/*/locations/*/clusters/*}" % client.transport._host, args[1], ) -def test_update_cluster_rest_flattened_error(transport: str = "rest"): +def test_delete_cluster_rest_flattened_error(transport: str = "rest"): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -13987,16 +15357,13 @@ def test_update_cluster_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.update_cluster( - service.UpdateClusterRequest(), - cluster=resources.Cluster( - backup_source=resources.BackupSource(backup_uid="backup_uid_value") - ), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.delete_cluster( + service.DeleteClusterRequest(), + name="name_value", ) -def test_delete_cluster_rest_use_cached_wrapped_rpc(): +def test_promote_cluster_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -14010,17 +15377,17 @@ def test_delete_cluster_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.delete_cluster in client._transport._wrapped_methods + assert client._transport.promote_cluster in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.delete_cluster] = mock_rpc + client._transport._wrapped_methods[client._transport.promote_cluster] = mock_rpc request = {} - client.delete_cluster(request) + client.promote_cluster(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 @@ -14029,14 +15396,16 @@ def test_delete_cluster_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.delete_cluster(request) + client.promote_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_cluster_rest_required_fields(request_type=service.DeleteClusterRequest): +def test_promote_cluster_rest_required_fields( + request_type=service.PromoteClusterRequest, +): transport_class = transports.AlloyDBAdminRestTransport request_init = {} @@ -14051,7 +15420,7 @@ def test_delete_cluster_rest_required_fields(request_type=service.DeleteClusterR unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_cluster._get_unset_required_fields(jsonified_request) + ).promote_cluster._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -14060,16 +15429,7 @@ def test_delete_cluster_rest_required_fields(request_type=service.DeleteClusterR unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_cluster._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "etag", - "force", - "request_id", - "validate_only", - ) - ) + ).promote_cluster._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -14095,9 +15455,10 @@ def test_delete_cluster_rest_required_fields(request_type=service.DeleteClusterR pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -14107,33 +15468,23 @@ def test_delete_cluster_rest_required_fields(request_type=service.DeleteClusterR response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_cluster(request) + response = client.promote_cluster(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_cluster_rest_unset_required_fields(): +def test_promote_cluster_rest_unset_required_fields(): transport = transports.AlloyDBAdminRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_cluster._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "etag", - "force", - "requestId", - "validateOnly", - ) - ) - & set(("name",)) - ) + unset_fields = transport.promote_cluster._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) -def test_delete_cluster_rest_flattened(): +def test_promote_cluster_rest_flattened(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -14160,20 +15511,20 @@ def test_delete_cluster_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - 
client.delete_cluster(**mock_args) + client.promote_cluster(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1beta/{name=projects/*/locations/*/clusters/*}" + "%s/v1beta/{name=projects/*/locations/*/clusters/*}:promote" % client.transport._host, args[1], ) -def test_delete_cluster_rest_flattened_error(transport: str = "rest"): +def test_promote_cluster_rest_flattened_error(transport: str = "rest"): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -14182,13 +15533,13 @@ def test_delete_cluster_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete_cluster( - service.DeleteClusterRequest(), + client.promote_cluster( + service.PromoteClusterRequest(), name="name_value", ) -def test_promote_cluster_rest_use_cached_wrapped_rpc(): +def test_switchover_cluster_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -14202,17 +15553,21 @@ def test_promote_cluster_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.promote_cluster in client._transport._wrapped_methods + assert ( + client._transport.switchover_cluster in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.promote_cluster] = mock_rpc + client._transport._wrapped_methods[ + client._transport.switchover_cluster + ] = mock_rpc request = {} - client.promote_cluster(request) + client.switchover_cluster(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 @@ -14221,15 +15576,15 @@ def test_promote_cluster_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.promote_cluster(request) + client.switchover_cluster(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_promote_cluster_rest_required_fields( - request_type=service.PromoteClusterRequest, +def test_switchover_cluster_rest_required_fields( + request_type=service.SwitchoverClusterRequest, ): transport_class = transports.AlloyDBAdminRestTransport @@ -14245,7 +15600,7 @@ def test_promote_cluster_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).promote_cluster._get_unset_required_fields(jsonified_request) + ).switchover_cluster._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -14254,7 +15609,7 @@ def test_promote_cluster_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).promote_cluster._get_unset_required_fields(jsonified_request) + ).switchover_cluster._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -14293,23 +15648,23 @@ def test_promote_cluster_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.promote_cluster(request) + response = client.switchover_cluster(request) 
expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_promote_cluster_rest_unset_required_fields(): +def test_switchover_cluster_rest_unset_required_fields(): transport = transports.AlloyDBAdminRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.promote_cluster._get_unset_required_fields({}) + unset_fields = transport.switchover_cluster._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name",))) -def test_promote_cluster_rest_flattened(): +def test_switchover_cluster_rest_flattened(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -14336,20 +15691,20 @@ def test_promote_cluster_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.promote_cluster(**mock_args) + client.switchover_cluster(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1beta/{name=projects/*/locations/*/clusters/*}:promote" + "%s/v1beta/{name=projects/*/locations/*/clusters/*}:switchover" % client.transport._host, args[1], ) -def test_promote_cluster_rest_flattened_error(transport: str = "rest"): +def test_switchover_cluster_rest_flattened_error(transport: str = "rest"): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -14357,9 +15712,9 @@ def test_promote_cluster_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
- with pytest.raises(ValueError): - client.promote_cluster( - service.PromoteClusterRequest(), + with pytest.raises(ValueError): + client.switchover_cluster( + service.SwitchoverClusterRequest(), name="name_value", ) @@ -16672,6 +18027,212 @@ def test_restart_instance_rest_flattened_error(transport: str = "rest"): ) +def test_execute_sql_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.execute_sql in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.execute_sql] = mock_rpc + + request = {} + client.execute_sql(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.execute_sql(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_execute_sql_rest_required_fields(request_type=service.ExecuteSqlRequest): + transport_class = transports.AlloyDBAdminRestTransport + + request_init = {} + request_init["instance"] = "" + request_init["database"] = "" + request_init["user"] = "" + request_init["sql_statement"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).execute_sql._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["instance"] = "instance_value" + jsonified_request["database"] = "database_value" + jsonified_request["user"] = "user_value" + jsonified_request["sqlStatement"] = "sql_statement_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).execute_sql._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instance" in jsonified_request + assert jsonified_request["instance"] == "instance_value" + assert "database" in jsonified_request + assert jsonified_request["database"] == "database_value" + assert "user" in jsonified_request + assert jsonified_request["user"] == "user_value" + assert "sqlStatement" in jsonified_request + assert jsonified_request["sqlStatement"] == "sql_statement_value" + + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = 
request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = service.ExecuteSqlResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = service.ExecuteSqlResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.execute_sql(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_execute_sql_rest_unset_required_fields(): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.execute_sql._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "instance", + "database", + "user", + "sqlStatement", + ) + ) + ) + + +def test_execute_sql_rest_flattened(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = service.ExecuteSqlResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "instance": "projects/sample1/locations/sample2/clusters/sample3/instances/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + instance="instance_value", + database="database_value", + user="user_value", + sql_statement="sql_statement_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = service.ExecuteSqlResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.execute_sql(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{instance=projects/*/locations/*/clusters/*/instances/*}:executeSql" + % client.transport._host, + args[1], + ) + + +def test_execute_sql_rest_flattened_error(transport: str = "rest"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.execute_sql( + service.ExecuteSqlRequest(), + instance="instance_value", + database="database_value", + user="user_value", + sql_statement="sql_statement_value", + password="password_value", + ) + + def test_list_backups_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -19803,6 +21364,27 @@ def test_update_cluster_empty_call_grpc(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_upgrade_cluster_empty_call_grpc(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.upgrade_cluster), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.upgrade_cluster(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.UpgradeClusterRequest() + + assert args[0] == request_msg + + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. def test_delete_cluster_empty_call_grpc(): @@ -19845,6 +21427,29 @@ def test_promote_cluster_empty_call_grpc(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_switchover_cluster_empty_call_grpc(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.switchover_cluster), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.switchover_cluster(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.SwitchoverClusterRequest() + + assert args[0] == request_msg + + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. def test_restore_cluster_empty_call_grpc(): @@ -20105,6 +21710,27 @@ def test_restart_instance_empty_call_grpc(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_execute_sql_empty_call_grpc(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.execute_sql), "__call__") as call: + call.return_value = service.ExecuteSqlResponse() + client.execute_sql(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.ExecuteSqlRequest() + + assert args[0] == request_msg + + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. def test_list_backups_empty_call_grpc(): @@ -20471,6 +22097,7 @@ async def test_get_cluster_empty_call_grpc_asyncio(): etag="etag_value", reconciling=True, satisfies_pzs=True, + subscription_type=resources.SubscriptionType.STANDARD, ) ) await client.get_cluster(request=None) @@ -20518,17 +22145,67 @@ async def test_update_cluster_empty_call_grpc_asyncio(): ) # Mock the actual call, and fake the request. 
- with mock.patch.object(type(client.transport.update_cluster), "__call__") as call: + with mock.patch.object(type(client.transport.update_cluster), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.update_cluster(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.UpdateClusterRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_upgrade_cluster_empty_call_grpc_asyncio(): + client = AlloyDBAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.upgrade_cluster), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.upgrade_cluster(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.UpgradeClusterRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_cluster_empty_call_grpc_asyncio(): + client = AlloyDBAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete_cluster), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - await client.update_cluster(request=None) + await client.delete_cluster(request=None) # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - request_msg = service.UpdateClusterRequest() + request_msg = service.DeleteClusterRequest() assert args[0] == request_msg @@ -20536,24 +22213,24 @@ async def test_update_cluster_empty_call_grpc_asyncio(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @pytest.mark.asyncio -async def test_delete_cluster_empty_call_grpc_asyncio(): +async def test_promote_cluster_empty_call_grpc_asyncio(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), transport="grpc_asyncio", ) # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.delete_cluster), "__call__") as call: + with mock.patch.object(type(client.transport.promote_cluster), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - await client.delete_cluster(request=None) + await client.promote_cluster(request=None) # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - request_msg = service.DeleteClusterRequest() + request_msg = service.PromoteClusterRequest() assert args[0] == request_msg @@ -20561,24 +22238,26 @@ async def test_delete_cluster_empty_call_grpc_asyncio(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
@pytest.mark.asyncio -async def test_promote_cluster_empty_call_grpc_asyncio(): +async def test_switchover_cluster_empty_call_grpc_asyncio(): client = AlloyDBAdminAsyncClient( credentials=async_anonymous_credentials(), transport="grpc_asyncio", ) # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.promote_cluster), "__call__") as call: + with mock.patch.object( + type(client.transport.switchover_cluster), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - await client.promote_cluster(request=None) + await client.switchover_cluster(request=None) # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - request_msg = service.PromoteClusterRequest() + request_msg = service.SwitchoverClusterRequest() assert args[0] == request_msg @@ -20908,6 +22587,31 @@ async def test_restart_instance_empty_call_grpc_asyncio(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_execute_sql_empty_call_grpc_asyncio(): + client = AlloyDBAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.execute_sql), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ExecuteSqlResponse() + ) + await client.execute_sql(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.ExecuteSqlRequest() + + assert args[0] == request_msg + + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @pytest.mark.asyncio @@ -21190,6 +22894,7 @@ async def test_get_user_empty_call_grpc_asyncio(): password="password_value", database_roles=["database_roles_value"], user_type=resources.User.UserType.ALLOYDB_BUILT_IN, + keep_extra_roles=True, ) ) await client.get_user(request=None) @@ -21220,6 +22925,7 @@ async def test_create_user_empty_call_grpc_asyncio(): password="password_value", database_roles=["database_roles_value"], user_type=resources.User.UserType.ALLOYDB_BUILT_IN, + keep_extra_roles=True, ) ) await client.create_user(request=None) @@ -21250,6 +22956,7 @@ async def test_update_user_empty_call_grpc_asyncio(): password="password_value", database_roles=["database_roles_value"], user_type=resources.User.UserType.ALLOYDB_BUILT_IN, + keep_extra_roles=True, ) ) await client.update_user(request=None) @@ -21490,6 +23197,7 @@ def test_get_cluster_rest_call_success(request_type): etag="etag_value", reconciling=True, satisfies_pzs=True, + subscription_type=resources.SubscriptionType.STANDARD, ) # Wrap the value into a proper Response obj @@ -21515,6 +23223,7 @@ def test_get_cluster_rest_call_success(request_type): assert response.etag == "etag_value" assert response.reconciling is True assert response.satisfies_pzs is True + assert response.subscription_type == resources.SubscriptionType.STANDARD @pytest.mark.parametrize("null_interceptor", [True, False]) @@ -21617,6 +23326,11 @@ def test_create_cluster_rest_call_success(request_type): "reference_id": "reference_id_value", "source_type": 1, }, + "cloudsql_backup_run_source": { + "project": "project_value", + "instance_id": "instance_id_value", + "backup_run_id": 1366, + }, "name": "name_value", "display_name": "display_name_value", "uid": 
"uid_value", @@ -21678,10 +23392,20 @@ def test_create_cluster_rest_call_success(request_type): ] }, "satisfies_pzs": True, + "psc_config": {"psc_enabled": True}, "maintenance_update_policy": { "maintenance_windows": [{"day": 1, "start_time": {}}] }, "maintenance_schedule": {"start_time": {}}, + "gemini_config": {"entitled": True}, + "subscription_type": 1, + "trial_metadata": { + "start_time": {}, + "end_time": {}, + "upgrade_time": {}, + "grace_end_time": {}, + }, + "tags": {}, } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency @@ -21875,6 +23599,11 @@ def test_update_cluster_rest_call_success(request_type): "reference_id": "reference_id_value", "source_type": 1, }, + "cloudsql_backup_run_source": { + "project": "project_value", + "instance_id": "instance_id_value", + "backup_run_id": 1366, + }, "name": "projects/sample1/locations/sample2/clusters/sample3", "display_name": "display_name_value", "uid": "uid_value", @@ -21936,10 +23665,20 @@ def test_update_cluster_rest_call_success(request_type): ] }, "satisfies_pzs": True, + "psc_config": {"psc_enabled": True}, "maintenance_update_policy": { "maintenance_windows": [{"day": 1, "start_time": {}}] }, "maintenance_schedule": {"start_time": {}}, + "gemini_config": {"entitled": True}, + "subscription_type": 1, + "trial_metadata": { + "start_time": {}, + "end_time": {}, + "upgrade_time": {}, + "grace_end_time": {}, + }, + "tags": {}, } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency @@ -21996,18 +23735,240 @@ def get_message_fields(field): } ) - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["cluster"][field])): - del request_init["cluster"][field][i][subfield] - else: - del request_init["cluster"][field][subfield] + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["cluster"][field])): + del request_init["cluster"][field][i][subfield] + else: + del request_init["cluster"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_cluster(request) + + # Establish that the response is the type that we expect. + json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_cluster_rest_interceptors(null_interceptor): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AlloyDBAdminRestInterceptor(), + ) + client = AlloyDBAdminClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_update_cluster" + ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "pre_update_cluster" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.UpdateClusterRequest.pb(service.UpdateClusterRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = service.UpdateClusterRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.update_cluster( + request, + metadata=[ + ("key", 
"val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_upgrade_cluster_rest_bad_request(request_type=service.UpgradeClusterRequest): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/clusters/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.upgrade_cluster(request) + + +@pytest.mark.parametrize( + "request_type", + [ + service.UpgradeClusterRequest, + dict, + ], +) +def test_upgrade_cluster_rest_call_success(request_type): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/clusters/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.upgrade_cluster(request) + + # Establish that the response is the type that we expect. + json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_upgrade_cluster_rest_interceptors(null_interceptor): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AlloyDBAdminRestInterceptor(), + ) + client = AlloyDBAdminClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_upgrade_cluster" + ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "pre_upgrade_cluster" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.UpgradeClusterRequest.pb(service.UpgradeClusterRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = service.UpgradeClusterRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.upgrade_cluster( + request, + metadata=[ + 
("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_cluster_rest_bad_request(request_type=service.DeleteClusterRequest): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/clusters/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.delete_cluster(request) + + +@pytest.mark.parametrize( + "request_type", + [ + service.DeleteClusterRequest, + dict, + ], +) +def test_delete_cluster_rest_call_success(request_type): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/clusters/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -22021,14 +23982,14 @@ def get_message_fields(field): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_cluster(request) + response = client.delete_cluster(request) # Establish that the response is the type that we expect. 
json_return_value = json_format.MessageToJson(return_value) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_cluster_rest_interceptors(null_interceptor): +def test_delete_cluster_rest_interceptors(null_interceptor): transport = transports.AlloyDBAdminRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -22044,13 +24005,13 @@ def test_update_cluster_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.AlloyDBAdminRestInterceptor, "post_update_cluster" + transports.AlloyDBAdminRestInterceptor, "post_delete_cluster" ) as post, mock.patch.object( - transports.AlloyDBAdminRestInterceptor, "pre_update_cluster" + transports.AlloyDBAdminRestInterceptor, "pre_delete_cluster" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = service.UpdateClusterRequest.pb(service.UpdateClusterRequest()) + pb_message = service.DeleteClusterRequest.pb(service.DeleteClusterRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -22063,7 +24024,7 @@ def test_update_cluster_rest_interceptors(null_interceptor): return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.content = return_value - request = service.UpdateClusterRequest() + request = service.DeleteClusterRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -22071,7 +24032,7 @@ def test_update_cluster_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.update_cluster( + client.delete_cluster( request, metadata=[ ("key", "val"), @@ -22083,7 +24044,7 @@ def test_update_cluster_rest_interceptors(null_interceptor): post.assert_called_once() -def test_delete_cluster_rest_bad_request(request_type=service.DeleteClusterRequest): +def test_promote_cluster_rest_bad_request(request_type=service.PromoteClusterRequest): client 
= AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -22102,17 +24063,17 @@ def test_delete_cluster_rest_bad_request(request_type=service.DeleteClusterReque response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value - client.delete_cluster(request) + client.promote_cluster(request) @pytest.mark.parametrize( "request_type", [ - service.DeleteClusterRequest, + service.PromoteClusterRequest, dict, ], ) -def test_delete_cluster_rest_call_success(request_type): +def test_promote_cluster_rest_call_success(request_type): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -22132,14 +24093,14 @@ def test_delete_cluster_rest_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_cluster(request) + response = client.promote_cluster(request) # Establish that the response is the type that we expect. 
json_return_value = json_format.MessageToJson(return_value) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_cluster_rest_interceptors(null_interceptor): +def test_promote_cluster_rest_interceptors(null_interceptor): transport = transports.AlloyDBAdminRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -22155,13 +24116,13 @@ def test_delete_cluster_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.AlloyDBAdminRestInterceptor, "post_delete_cluster" + transports.AlloyDBAdminRestInterceptor, "post_promote_cluster" ) as post, mock.patch.object( - transports.AlloyDBAdminRestInterceptor, "pre_delete_cluster" + transports.AlloyDBAdminRestInterceptor, "pre_promote_cluster" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = service.DeleteClusterRequest.pb(service.DeleteClusterRequest()) + pb_message = service.PromoteClusterRequest.pb(service.PromoteClusterRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -22174,7 +24135,7 @@ def test_delete_cluster_rest_interceptors(null_interceptor): return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.content = return_value - request = service.DeleteClusterRequest() + request = service.PromoteClusterRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -22182,7 +24143,7 @@ def test_delete_cluster_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.delete_cluster( + client.promote_cluster( request, metadata=[ ("key", "val"), @@ -22194,7 +24155,9 @@ def test_delete_cluster_rest_interceptors(null_interceptor): post.assert_called_once() -def test_promote_cluster_rest_bad_request(request_type=service.PromoteClusterRequest): +def test_switchover_cluster_rest_bad_request( + 
request_type=service.SwitchoverClusterRequest, +): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -22213,17 +24176,17 @@ def test_promote_cluster_rest_bad_request(request_type=service.PromoteClusterReq response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value - client.promote_cluster(request) + client.switchover_cluster(request) @pytest.mark.parametrize( "request_type", [ - service.PromoteClusterRequest, + service.SwitchoverClusterRequest, dict, ], ) -def test_promote_cluster_rest_call_success(request_type): +def test_switchover_cluster_rest_call_success(request_type): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -22243,14 +24206,14 @@ def test_promote_cluster_rest_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.promote_cluster(request) + response = client.switchover_cluster(request) # Establish that the response is the type that we expect. 
json_return_value = json_format.MessageToJson(return_value) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_promote_cluster_rest_interceptors(null_interceptor): +def test_switchover_cluster_rest_interceptors(null_interceptor): transport = transports.AlloyDBAdminRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -22266,13 +24229,15 @@ def test_promote_cluster_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.AlloyDBAdminRestInterceptor, "post_promote_cluster" + transports.AlloyDBAdminRestInterceptor, "post_switchover_cluster" ) as post, mock.patch.object( - transports.AlloyDBAdminRestInterceptor, "pre_promote_cluster" + transports.AlloyDBAdminRestInterceptor, "pre_switchover_cluster" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = service.PromoteClusterRequest.pb(service.PromoteClusterRequest()) + pb_message = service.SwitchoverClusterRequest.pb( + service.SwitchoverClusterRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -22285,7 +24250,7 @@ def test_promote_cluster_rest_interceptors(null_interceptor): return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.content = return_value - request = service.PromoteClusterRequest() + request = service.SwitchoverClusterRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -22293,7 +24258,7 @@ def test_promote_cluster_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.promote_cluster( + client.switchover_cluster( request, metadata=[ ("key", "val"), @@ -22464,6 +24429,11 @@ def test_create_secondary_cluster_rest_call_success(request_type): "reference_id": "reference_id_value", "source_type": 1, }, + "cloudsql_backup_run_source": { + "project": "project_value", + "instance_id": 
"instance_id_value", + "backup_run_id": 1366, + }, "name": "name_value", "display_name": "display_name_value", "uid": "uid_value", @@ -22525,10 +24495,20 @@ def test_create_secondary_cluster_rest_call_success(request_type): ] }, "satisfies_pzs": True, + "psc_config": {"psc_enabled": True}, "maintenance_update_policy": { "maintenance_windows": [{"day": 1, "start_time": {}}] }, "maintenance_schedule": {"start_time": {}}, + "gemini_config": {"entitled": True}, + "subscription_type": 1, + "trial_metadata": { + "start_time": {}, + "end_time": {}, + "upgrade_time": {}, + "grace_end_time": {}, + }, + "tags": {}, } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency @@ -23001,6 +24981,17 @@ def test_create_instance_rest_call_success(request_type): "query_string_length": 2061, "query_plans_per_minute": 2378, }, + "observability_config": { + "enabled": True, + "preserve_comments": True, + "track_wait_events": True, + "track_wait_event_types": True, + "max_query_string_length": 2482, + "record_application_tags": True, + "query_plans_per_minute": 2378, + "track_active_queries": True, + "track_client_address": True, + }, "read_pool_config": {"node_count": 1070}, "ip_address": "ip_address_value", "public_ip_address": "public_ip_address_value", @@ -23019,30 +25010,14 @@ def test_create_instance_rest_call_success(request_type): "allowed_consumer_projects_value1", "allowed_consumer_projects_value2", ], - "allowed_consumer_networks": [ - "allowed_consumer_networks_value1", - "allowed_consumer_networks_value2", - ], - "psc_interface_configs": [ - { - "consumer_endpoint_ips": [ - "consumer_endpoint_ips_value1", - "consumer_endpoint_ips_value2", - ], - "network_attachment": "network_attachment_value", - } - ], - "outgoing_service_attachment_links": [ - "outgoing_service_attachment_links_value1", - "outgoing_service_attachment_links_value2", - ], - 
"psc_enabled": True, + "psc_dns_name": "psc_dns_name_value", }, "network_config": { "authorized_external_networks": [{"cidr_range": "cidr_range_value"}], "enable_public_ip": True, "enable_outbound_public_ip": True, }, + "gemini_config": {"entitled": True}, "outbound_public_ip_addresses": [ "outbound_public_ip_addresses_value1", "outbound_public_ip_addresses_value2", @@ -23255,6 +25230,17 @@ def test_create_secondary_instance_rest_call_success(request_type): "query_string_length": 2061, "query_plans_per_minute": 2378, }, + "observability_config": { + "enabled": True, + "preserve_comments": True, + "track_wait_events": True, + "track_wait_event_types": True, + "max_query_string_length": 2482, + "record_application_tags": True, + "query_plans_per_minute": 2378, + "track_active_queries": True, + "track_client_address": True, + }, "read_pool_config": {"node_count": 1070}, "ip_address": "ip_address_value", "public_ip_address": "public_ip_address_value", @@ -23273,30 +25259,14 @@ def test_create_secondary_instance_rest_call_success(request_type): "allowed_consumer_projects_value1", "allowed_consumer_projects_value2", ], - "allowed_consumer_networks": [ - "allowed_consumer_networks_value1", - "allowed_consumer_networks_value2", - ], - "psc_interface_configs": [ - { - "consumer_endpoint_ips": [ - "consumer_endpoint_ips_value1", - "consumer_endpoint_ips_value2", - ], - "network_attachment": "network_attachment_value", - } - ], - "outgoing_service_attachment_links": [ - "outgoing_service_attachment_links_value1", - "outgoing_service_attachment_links_value2", - ], - "psc_enabled": True, + "psc_dns_name": "psc_dns_name_value", }, "network_config": { "authorized_external_networks": [{"cidr_range": "cidr_range_value"}], "enable_public_ip": True, "enable_outbound_public_ip": True, }, + "gemini_config": {"entitled": True}, "outbound_public_ip_addresses": [ "outbound_public_ip_addresses_value1", "outbound_public_ip_addresses_value2", @@ -23516,6 +25486,17 @@ def 
test_batch_create_instances_rest_call_success(request_type): "query_string_length": 2061, "query_plans_per_minute": 2378, }, + "observability_config": { + "enabled": True, + "preserve_comments": True, + "track_wait_events": True, + "track_wait_event_types": True, + "max_query_string_length": 2482, + "record_application_tags": True, + "query_plans_per_minute": 2378, + "track_active_queries": True, + "track_client_address": True, + }, "read_pool_config": {"node_count": 1070}, "ip_address": "ip_address_value", "public_ip_address": "public_ip_address_value", @@ -23534,24 +25515,7 @@ def test_batch_create_instances_rest_call_success(request_type): "allowed_consumer_projects_value1", "allowed_consumer_projects_value2", ], - "allowed_consumer_networks": [ - "allowed_consumer_networks_value1", - "allowed_consumer_networks_value2", - ], - "psc_interface_configs": [ - { - "consumer_endpoint_ips": [ - "consumer_endpoint_ips_value1", - "consumer_endpoint_ips_value2", - ], - "network_attachment": "network_attachment_value", - } - ], - "outgoing_service_attachment_links": [ - "outgoing_service_attachment_links_value1", - "outgoing_service_attachment_links_value2", - ], - "psc_enabled": True, + "psc_dns_name": "psc_dns_name_value", }, "network_config": { "authorized_external_networks": [ @@ -23560,6 +25524,7 @@ def test_batch_create_instances_rest_call_success(request_type): "enable_public_ip": True, "enable_outbound_public_ip": True, }, + "gemini_config": {"entitled": True}, "outbound_public_ip_addresses": [ "outbound_public_ip_addresses_value1", "outbound_public_ip_addresses_value2", @@ -23785,6 +25750,17 @@ def test_update_instance_rest_call_success(request_type): "query_string_length": 2061, "query_plans_per_minute": 2378, }, + "observability_config": { + "enabled": True, + "preserve_comments": True, + "track_wait_events": True, + "track_wait_event_types": True, + "max_query_string_length": 2482, + "record_application_tags": True, + "query_plans_per_minute": 2378, + 
"track_active_queries": True, + "track_client_address": True, + }, "read_pool_config": {"node_count": 1070}, "ip_address": "ip_address_value", "public_ip_address": "public_ip_address_value", @@ -23803,30 +25779,14 @@ def test_update_instance_rest_call_success(request_type): "allowed_consumer_projects_value1", "allowed_consumer_projects_value2", ], - "allowed_consumer_networks": [ - "allowed_consumer_networks_value1", - "allowed_consumer_networks_value2", - ], - "psc_interface_configs": [ - { - "consumer_endpoint_ips": [ - "consumer_endpoint_ips_value1", - "consumer_endpoint_ips_value2", - ], - "network_attachment": "network_attachment_value", - } - ], - "outgoing_service_attachment_links": [ - "outgoing_service_attachment_links_value1", - "outgoing_service_attachment_links_value2", - ], - "psc_enabled": True, + "psc_dns_name": "psc_dns_name_value", }, "network_config": { "authorized_external_networks": [{"cidr_range": "cidr_range_value"}], "enable_public_ip": True, "enable_outbound_public_ip": True, }, + "gemini_config": {"entitled": True}, "outbound_public_ip_addresses": [ "outbound_public_ip_addresses_value1", "outbound_public_ip_addresses_value2", @@ -24438,6 +26398,122 @@ def test_restart_instance_rest_interceptors(null_interceptor): post.assert_called_once() +def test_execute_sql_rest_bad_request(request_type=service.ExecuteSqlRequest): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "instance": "projects/sample1/locations/sample2/clusters/sample3/instances/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.execute_sql(request) + + +@pytest.mark.parametrize( + "request_type", + [ + service.ExecuteSqlRequest, + dict, + ], +) +def test_execute_sql_rest_call_success(request_type): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "instance": "projects/sample1/locations/sample2/clusters/sample3/instances/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = service.ExecuteSqlResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = service.ExecuteSqlResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.execute_sql(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, service.ExecuteSqlResponse) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_execute_sql_rest_interceptors(null_interceptor): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AlloyDBAdminRestInterceptor(), + ) + client = AlloyDBAdminClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_execute_sql" + ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "pre_execute_sql" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.ExecuteSqlRequest.pb(service.ExecuteSqlRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = service.ExecuteSqlResponse.to_json(service.ExecuteSqlResponse()) + req.return_value.content = return_value + + request = service.ExecuteSqlRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = service.ExecuteSqlResponse() + + client.execute_sql( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + def test_list_backups_rest_bad_request(request_type=service.ListBackupsRequest): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" @@ -24758,6 +26834,7 @@ def test_create_backup_rest_call_success(request_type): "expiry_quantity": {"retention_count": 1632, "total_retention_count": 2275}, "satisfies_pzs": True, "database_version": 1, + "tags": {}, } # The version of a generated 
dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency @@ -24967,6 +27044,7 @@ def test_update_backup_rest_call_success(request_type): "expiry_quantity": {"retention_count": 1632, "total_retention_count": 2275}, "satisfies_pzs": True, "database_version": 1, + "tags": {}, } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency @@ -25767,6 +27845,7 @@ def test_get_user_rest_call_success(request_type): password="password_value", database_roles=["database_roles_value"], user_type=resources.User.UserType.ALLOYDB_BUILT_IN, + keep_extra_roles=True, ) # Wrap the value into a proper Response obj @@ -25786,6 +27865,7 @@ def test_get_user_rest_call_success(request_type): assert response.password == "password_value" assert response.database_roles == ["database_roles_value"] assert response.user_type == resources.User.UserType.ALLOYDB_BUILT_IN + assert response.keep_extra_roles is True @pytest.mark.parametrize("null_interceptor", [True, False]) @@ -25883,6 +27963,7 @@ def test_create_user_rest_call_success(request_type): "password": "password_value", "database_roles": ["database_roles_value1", "database_roles_value2"], "user_type": 1, + "keep_extra_roles": True, } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency @@ -25961,6 +28042,7 @@ def get_message_fields(field): password="password_value", database_roles=["database_roles_value"], user_type=resources.User.UserType.ALLOYDB_BUILT_IN, + keep_extra_roles=True, ) # Wrap the value into a proper Response obj @@ -25980,6 +28062,7 @@ def get_message_fields(field): assert response.password == "password_value" assert response.database_roles == ["database_roles_value"] assert response.user_type == resources.User.UserType.ALLOYDB_BUILT_IN + assert response.keep_extra_roles is True @pytest.mark.parametrize("null_interceptor", [True, False]) @@ -26085,6 +28168,7 @@ def test_update_user_rest_call_success(request_type): "password": "password_value", "database_roles": ["database_roles_value1", "database_roles_value2"], "user_type": 1, + "keep_extra_roles": True, } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency @@ -26163,6 +28247,7 @@ def get_message_fields(field): password="password_value", database_roles=["database_roles_value"], user_type=resources.User.UserType.ALLOYDB_BUILT_IN, + keep_extra_roles=True, ) # Wrap the value into a proper Response obj @@ -26182,6 +28267,7 @@ def get_message_fields(field): assert response.password == "password_value" assert response.database_roles == ["database_roles_value"] assert response.user_type == resources.User.UserType.ALLOYDB_BUILT_IN + assert response.keep_extra_roles is True @pytest.mark.parametrize("null_interceptor", [True, False]) @@ -26904,6 +28990,26 @@ def test_update_cluster_empty_call_rest(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_upgrade_cluster_empty_call_rest(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.upgrade_cluster), "__call__") as call: + client.upgrade_cluster(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.UpgradeClusterRequest() + + assert args[0] == request_msg + + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. def test_delete_cluster_empty_call_rest(): @@ -26944,6 +29050,28 @@ def test_promote_cluster_empty_call_rest(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_switchover_cluster_empty_call_rest(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.switchover_cluster), "__call__" + ) as call: + client.switchover_cluster(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.SwitchoverClusterRequest() + + assert args[0] == request_msg + + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. def test_restore_cluster_empty_call_rest(): @@ -27192,6 +29320,26 @@ def test_restart_instance_empty_call_rest(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_execute_sql_empty_call_rest(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.execute_sql), "__call__") as call: + client.execute_sql(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.ExecuteSqlRequest() + + assert args[0] == request_msg + + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. def test_list_backups_empty_call_rest(): @@ -27532,8 +29680,10 @@ def test_alloy_db_admin_base_transport(): "get_cluster", "create_cluster", "update_cluster", + "upgrade_cluster", "delete_cluster", "promote_cluster", + "switchover_cluster", "restore_cluster", "create_secondary_cluster", "list_instances", @@ -27546,6 +29696,7 @@ def test_alloy_db_admin_base_transport(): "failover_instance", "inject_fault", "restart_instance", + "execute_sql", "list_backups", "get_backup", "create_backup", @@ -27838,12 +29989,18 @@ def test_alloy_db_admin_client_transport_session_collision(transport_name): session1 = client1.transport.update_cluster._session session2 = client2.transport.update_cluster._session assert session1 != session2 + session1 = client1.transport.upgrade_cluster._session + session2 = client2.transport.upgrade_cluster._session + assert session1 != session2 session1 = client1.transport.delete_cluster._session session2 = client2.transport.delete_cluster._session assert session1 != session2 session1 = client1.transport.promote_cluster._session session2 = client2.transport.promote_cluster._session assert session1 != session2 + session1 = client1.transport.switchover_cluster._session + session2 = client2.transport.switchover_cluster._session + assert session1 != session2 session1 = client1.transport.restore_cluster._session 
session2 = client2.transport.restore_cluster._session assert session1 != session2 @@ -27880,6 +30037,9 @@ def test_alloy_db_admin_client_transport_session_collision(transport_name): session1 = client1.transport.restart_instance._session session2 = client2.transport.restart_instance._session assert session1 != session2 + session1 = client1.transport.execute_sql._session + session2 = client2.transport.execute_sql._session + assert session1 != session2 session1 = client1.transport.list_backups._session session2 = client2.transport.list_backups._session assert session1 != session2 diff --git a/packages/google-cloud-api-gateway/CHANGELOG.md b/packages/google-cloud-api-gateway/CHANGELOG.md index 02377a0376ca..ec0fe0b0db60 100644 --- a/packages/google-cloud-api-gateway/CHANGELOG.md +++ b/packages/google-cloud-api-gateway/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [1.10.1](https://github.com/googleapis/google-cloud-python/compare/google-cloud-api-gateway-v1.10.0...google-cloud-api-gateway-v1.10.1) (2024-10-31) + + +### Bug Fixes + +* disable universe-domain validation ([85c7512](https://github.com/googleapis/google-cloud-python/commit/85c7512bbdde2b9cc60b4ad42b8c36c4558a07a5)) + ## [1.10.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-api-gateway-v1.9.5...google-cloud-api-gateway-v1.10.0) (2024-10-24) diff --git a/packages/google-cloud-api-gateway/google/cloud/apigateway/gapic_version.py b/packages/google-cloud-api-gateway/google/cloud/apigateway/gapic_version.py index d1d2a9e60a97..f1d827b5c728 100644 --- a/packages/google-cloud-api-gateway/google/cloud/apigateway/gapic_version.py +++ b/packages/google-cloud-api-gateway/google/cloud/apigateway/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.10.0" # {x-release-please-version} +__version__ = "1.10.1" # {x-release-please-version} diff --git a/packages/google-cloud-api-gateway/google/cloud/apigateway_v1/gapic_version.py b/packages/google-cloud-api-gateway/google/cloud/apigateway_v1/gapic_version.py index d1d2a9e60a97..f1d827b5c728 100644 --- a/packages/google-cloud-api-gateway/google/cloud/apigateway_v1/gapic_version.py +++ b/packages/google-cloud-api-gateway/google/cloud/apigateway_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.10.0" # {x-release-please-version} +__version__ = "1.10.1" # {x-release-please-version} diff --git a/packages/google-cloud-api-gateway/google/cloud/apigateway_v1/services/api_gateway_service/client.py b/packages/google-cloud-api-gateway/google/cloud/apigateway_v1/services/api_gateway_service/client.py index d3ad4c8f4101..83f187d100d2 100644 --- a/packages/google-cloud-api-gateway/google/cloud/apigateway_v1/services/api_gateway_service/client.py +++ b/packages/google-cloud-api-gateway/google/cloud/apigateway_v1/services/api_gateway_service/client.py @@ -562,36 +562,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. 
- """ - - default_universe = ApiGatewayServiceClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -601,13 +571,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or ApiGatewayServiceClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. 
+ return True @property def api_endpoint(self): diff --git a/packages/google-cloud-api-gateway/samples/generated_samples/snippet_metadata_google.cloud.apigateway.v1.json b/packages/google-cloud-api-gateway/samples/generated_samples/snippet_metadata_google.cloud.apigateway.v1.json index 0e96fa0e231b..8a5e1a18b8e1 100644 --- a/packages/google-cloud-api-gateway/samples/generated_samples/snippet_metadata_google.cloud.apigateway.v1.json +++ b/packages/google-cloud-api-gateway/samples/generated_samples/snippet_metadata_google.cloud.apigateway.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-api-gateway", - "version": "1.10.0" + "version": "1.10.1" }, "snippets": [ { diff --git a/packages/google-cloud-api-gateway/tests/unit/gapic/apigateway_v1/test_api_gateway_service.py b/packages/google-cloud-api-gateway/tests/unit/gapic/apigateway_v1/test_api_gateway_service.py index 37e8d72a2b43..2dfe86359e2c 100644 --- a/packages/google-cloud-api-gateway/tests/unit/gapic/apigateway_v1/test_api_gateway_service.py +++ b/packages/google-cloud-api-gateway/tests/unit/gapic/apigateway_v1/test_api_gateway_service.py @@ -340,86 +340,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (ApiGatewayServiceClient, transports.ApiGatewayServiceGrpcTransport, "grpc"), - (ApiGatewayServiceClient, transports.ApiGatewayServiceRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. 
The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-api-keys/CHANGELOG.md b/packages/google-cloud-api-keys/CHANGELOG.md index 27afb860d5de..237cb1c30ffa 100644 --- a/packages/google-cloud-api-keys/CHANGELOG.md +++ b/packages/google-cloud-api-keys/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [0.5.13](https://github.com/googleapis/google-cloud-python/compare/google-cloud-api-keys-v0.5.12...google-cloud-api-keys-v0.5.13) (2024-10-31) + + +### Bug Fixes + +* disable universe-domain validation ([85c7512](https://github.com/googleapis/google-cloud-python/commit/85c7512bbdde2b9cc60b4ad42b8c36c4558a07a5)) + ## [0.5.12](https://github.com/googleapis/google-cloud-python/compare/google-cloud-api-keys-v0.5.11...google-cloud-api-keys-v0.5.12) (2024-10-24) diff --git a/packages/google-cloud-api-keys/google/cloud/api_keys/gapic_version.py b/packages/google-cloud-api-keys/google/cloud/api_keys/gapic_version.py index cc43a639a105..bf678492aaad 100644 --- 
a/packages/google-cloud-api-keys/google/cloud/api_keys/gapic_version.py +++ b/packages/google-cloud-api-keys/google/cloud/api_keys/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.5.12" # {x-release-please-version} +__version__ = "0.5.13" # {x-release-please-version} diff --git a/packages/google-cloud-api-keys/google/cloud/api_keys_v2/gapic_version.py b/packages/google-cloud-api-keys/google/cloud/api_keys_v2/gapic_version.py index cc43a639a105..bf678492aaad 100644 --- a/packages/google-cloud-api-keys/google/cloud/api_keys_v2/gapic_version.py +++ b/packages/google-cloud-api-keys/google/cloud/api_keys_v2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.5.12" # {x-release-please-version} +__version__ = "0.5.13" # {x-release-please-version} diff --git a/packages/google-cloud-api-keys/google/cloud/api_keys_v2/services/api_keys/client.py b/packages/google-cloud-api-keys/google/cloud/api_keys_v2/services/api_keys/client.py index 9e6146d5a254..544a083e78ed 100644 --- a/packages/google-cloud-api-keys/google/cloud/api_keys_v2/services/api_keys/client.py +++ b/packages/google-cloud-api-keys/google/cloud/api_keys_v2/services/api_keys/client.py @@ -466,36 +466,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. 
- - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = ApiKeysClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -505,13 +475,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or ApiKeysClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. 
+ return True @property def api_endpoint(self): diff --git a/packages/google-cloud-api-keys/samples/generated_samples/snippet_metadata_google.api.apikeys.v2.json b/packages/google-cloud-api-keys/samples/generated_samples/snippet_metadata_google.api.apikeys.v2.json index 0561851dc9bd..da595c436b65 100644 --- a/packages/google-cloud-api-keys/samples/generated_samples/snippet_metadata_google.api.apikeys.v2.json +++ b/packages/google-cloud-api-keys/samples/generated_samples/snippet_metadata_google.api.apikeys.v2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-api-keys", - "version": "0.5.12" + "version": "0.5.13" }, "snippets": [ { diff --git a/packages/google-cloud-api-keys/tests/unit/gapic/api_keys_v2/test_api_keys.py b/packages/google-cloud-api-keys/tests/unit/gapic/api_keys_v2/test_api_keys.py index facc09136402..699bfd1d45fa 100644 --- a/packages/google-cloud-api-keys/tests/unit/gapic/api_keys_v2/test_api_keys.py +++ b/packages/google-cloud-api-keys/tests/unit/gapic/api_keys_v2/test_api_keys.py @@ -296,86 +296,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (ApiKeysClient, transports.ApiKeysGrpcTransport, "grpc"), - (ApiKeysClient, transports.ApiKeysRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. 
- channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-apigee-connect/CHANGELOG.md b/packages/google-cloud-apigee-connect/CHANGELOG.md index af6cc52447e3..8bf7967b4903 100644 --- a/packages/google-cloud-apigee-connect/CHANGELOG.md +++ b/packages/google-cloud-apigee-connect/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [1.10.1](https://github.com/googleapis/google-cloud-python/compare/google-cloud-apigee-connect-v1.10.0...google-cloud-apigee-connect-v1.10.1) (2024-10-31) + + +### Bug Fixes + +* disable universe-domain validation ([85c7512](https://github.com/googleapis/google-cloud-python/commit/85c7512bbdde2b9cc60b4ad42b8c36c4558a07a5)) + ## [1.10.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-apigee-connect-v1.9.5...google-cloud-apigee-connect-v1.10.0) (2024-10-24) diff --git a/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect/gapic_version.py b/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect/gapic_version.py index d1d2a9e60a97..f1d827b5c728 
100644 --- a/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect/gapic_version.py +++ b/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.10.0" # {x-release-please-version} +__version__ = "1.10.1" # {x-release-please-version} diff --git a/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/gapic_version.py b/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/gapic_version.py index d1d2a9e60a97..f1d827b5c728 100644 --- a/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/gapic_version.py +++ b/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.10.0" # {x-release-please-version} +__version__ = "1.10.1" # {x-release-please-version} diff --git a/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/services/connection_service/client.py b/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/services/connection_service/client.py index 8b0713d63ed3..57386b00cd6d 100644 --- a/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/services/connection_service/client.py +++ b/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/services/connection_service/client.py @@ -456,36 +456,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. 
- credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = ConnectionServiceClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -495,13 +465,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or ConnectionServiceClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. 
+ return True @property def api_endpoint(self): diff --git a/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/services/tether/client.py b/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/services/tether/client.py index 7b651a854222..76e2ab733e96 100644 --- a/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/services/tether/client.py +++ b/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/services/tether/client.py @@ -442,36 +442,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = TetherClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -481,13 +451,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. 
""" - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or TetherClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-apigee-connect/samples/generated_samples/snippet_metadata_google.cloud.apigeeconnect.v1.json b/packages/google-cloud-apigee-connect/samples/generated_samples/snippet_metadata_google.cloud.apigeeconnect.v1.json index 818baf25c308..c859fe599d2a 100644 --- a/packages/google-cloud-apigee-connect/samples/generated_samples/snippet_metadata_google.cloud.apigeeconnect.v1.json +++ b/packages/google-cloud-apigee-connect/samples/generated_samples/snippet_metadata_google.cloud.apigeeconnect.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-apigee-connect", - "version": "1.10.0" + "version": "1.10.1" }, "snippets": [ { diff --git a/packages/google-cloud-apigee-connect/tests/unit/gapic/apigeeconnect_v1/test_connection_service.py b/packages/google-cloud-apigee-connect/tests/unit/gapic/apigeeconnect_v1/test_connection_service.py index 355799758d93..e9418bd984a8 100644 --- a/packages/google-cloud-apigee-connect/tests/unit/gapic/apigeeconnect_v1/test_connection_service.py +++ b/packages/google-cloud-apigee-connect/tests/unit/gapic/apigeeconnect_v1/test_connection_service.py @@ -322,85 +322,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
-@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (ConnectionServiceClient, transports.ConnectionServiceGrpcTransport, "grpc"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. 
- client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
- ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-apigee-connect/tests/unit/gapic/apigeeconnect_v1/test_tether.py b/packages/google-cloud-apigee-connect/tests/unit/gapic/apigeeconnect_v1/test_tether.py index eddc93ede74a..9018fd010c76 100644 --- a/packages/google-cloud-apigee-connect/tests/unit/gapic/apigeeconnect_v1/test_tether.py +++ b/packages/google-cloud-apigee-connect/tests/unit/gapic/apigeeconnect_v1/test_tether.py @@ -279,85 +279,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (TetherClient, transports.TetherGrpcTransport, "grpc"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. 
- channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-apigee-registry/CHANGELOG.md b/packages/google-cloud-apigee-registry/CHANGELOG.md index 6ae1c2c83460..488b4cfd17bd 100644 --- a/packages/google-cloud-apigee-registry/CHANGELOG.md +++ b/packages/google-cloud-apigee-registry/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [0.6.13](https://github.com/googleapis/google-cloud-python/compare/google-cloud-apigee-registry-v0.6.12...google-cloud-apigee-registry-v0.6.13) (2024-10-31) + + +### Bug Fixes + +* disable universe-domain validation ([85c7512](https://github.com/googleapis/google-cloud-python/commit/85c7512bbdde2b9cc60b4ad42b8c36c4558a07a5)) + ## [0.6.12](https://github.com/googleapis/google-cloud-python/compare/google-cloud-apigee-registry-v0.6.11...google-cloud-apigee-registry-v0.6.12) (2024-10-24) diff --git a/packages/google-cloud-apigee-registry/google/cloud/apigee_registry/gapic_version.py b/packages/google-cloud-apigee-registry/google/cloud/apigee_registry/gapic_version.py index 
44e5c049e336..b72badcc1eca 100644 --- a/packages/google-cloud-apigee-registry/google/cloud/apigee_registry/gapic_version.py +++ b/packages/google-cloud-apigee-registry/google/cloud/apigee_registry/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.6.12" # {x-release-please-version} +__version__ = "0.6.13" # {x-release-please-version} diff --git a/packages/google-cloud-apigee-registry/google/cloud/apigee_registry_v1/gapic_version.py b/packages/google-cloud-apigee-registry/google/cloud/apigee_registry_v1/gapic_version.py index 44e5c049e336..b72badcc1eca 100644 --- a/packages/google-cloud-apigee-registry/google/cloud/apigee_registry_v1/gapic_version.py +++ b/packages/google-cloud-apigee-registry/google/cloud/apigee_registry_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.6.12" # {x-release-please-version} +__version__ = "0.6.13" # {x-release-please-version} diff --git a/packages/google-cloud-apigee-registry/google/cloud/apigee_registry_v1/services/provisioning/client.py b/packages/google-cloud-apigee-registry/google/cloud/apigee_registry_v1/services/provisioning/client.py index b4130fca7e63..aeaea4f8ba3f 100644 --- a/packages/google-cloud-apigee-registry/google/cloud/apigee_registry_v1/services/provisioning/client.py +++ b/packages/google-cloud-apigee-registry/google/cloud/apigee_registry_v1/services/provisioning/client.py @@ -469,36 +469,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. 
- credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = ProvisioningClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -508,13 +478,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or ProvisioningClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. 
+ return True @property def api_endpoint(self): diff --git a/packages/google-cloud-apigee-registry/google/cloud/apigee_registry_v1/services/registry/client.py b/packages/google-cloud-apigee-registry/google/cloud/apigee_registry_v1/services/registry/client.py index 7e1b6e6ea32c..43b0a11b2a0e 100644 --- a/packages/google-cloud-apigee-registry/google/cloud/apigee_registry_v1/services/registry/client.py +++ b/packages/google-cloud-apigee-registry/google/cloud/apigee_registry_v1/services/registry/client.py @@ -566,36 +566,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = RegistryClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -605,13 +575,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. 
""" - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or RegistryClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-apigee-registry/samples/generated_samples/snippet_metadata_google.cloud.apigeeregistry.v1.json b/packages/google-cloud-apigee-registry/samples/generated_samples/snippet_metadata_google.cloud.apigeeregistry.v1.json index a1ac0971c707..70bba77cb438 100644 --- a/packages/google-cloud-apigee-registry/samples/generated_samples/snippet_metadata_google.cloud.apigeeregistry.v1.json +++ b/packages/google-cloud-apigee-registry/samples/generated_samples/snippet_metadata_google.cloud.apigeeregistry.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-apigee-registry", - "version": "0.6.12" + "version": "0.6.13" }, "snippets": [ { diff --git a/packages/google-cloud-apigee-registry/tests/unit/gapic/apigee_registry_v1/test_provisioning.py b/packages/google-cloud-apigee-registry/tests/unit/gapic/apigee_registry_v1/test_provisioning.py index 60c06698ca91..962e61a891b1 100644 --- a/packages/google-cloud-apigee-registry/tests/unit/gapic/apigee_registry_v1/test_provisioning.py +++ b/packages/google-cloud-apigee-registry/tests/unit/gapic/apigee_registry_v1/test_provisioning.py @@ -318,86 +318,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
-@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (ProvisioningClient, transports.ProvisioningGrpcTransport, "grpc"), - (ProvisioningClient, transports.ProvisioningRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. 
- google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
- ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-apigee-registry/tests/unit/gapic/apigee_registry_v1/test_registry.py b/packages/google-cloud-apigee-registry/tests/unit/gapic/apigee_registry_v1/test_registry.py index 5b7f7065e3d5..d5e5078e6acc 100644 --- a/packages/google-cloud-apigee-registry/tests/unit/gapic/apigee_registry_v1/test_registry.py +++ b/packages/google-cloud-apigee-registry/tests/unit/gapic/apigee_registry_v1/test_registry.py @@ -299,86 +299,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (RegistryClient, transports.RegistryGrpcTransport, "grpc"), - (RegistryClient, transports.RegistryRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. 
- channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-apihub/CHANGELOG.md b/packages/google-cloud-apihub/CHANGELOG.md index a85df21dd103..8f74ed60de4b 100644 --- a/packages/google-cloud-apihub/CHANGELOG.md +++ b/packages/google-cloud-apihub/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [0.2.2](https://github.com/googleapis/google-cloud-python/compare/google-cloud-apihub-v0.2.1...google-cloud-apihub-v0.2.2) (2024-10-31) + + +### Bug Fixes + +* disable universe-domain validation ([85c7512](https://github.com/googleapis/google-cloud-python/commit/85c7512bbdde2b9cc60b4ad42b8c36c4558a07a5)) + ## [0.2.1](https://github.com/googleapis/google-cloud-python/compare/google-cloud-apihub-v0.2.0...google-cloud-apihub-v0.2.1) (2024-10-24) diff --git a/packages/google-cloud-apihub/google/cloud/apihub/gapic_version.py b/packages/google-cloud-apihub/google/cloud/apihub/gapic_version.py index 6d8247638d59..d1a1a883babd 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub/gapic_version.py +++ 
b/packages/google-cloud-apihub/google/cloud/apihub/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.2.1" # {x-release-please-version} +__version__ = "0.2.2" # {x-release-please-version} diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/gapic_version.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/gapic_version.py index 6d8247638d59..d1a1a883babd 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/gapic_version.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.2.1" # {x-release-please-version} +__version__ = "0.2.2" # {x-release-please-version} diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub/client.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub/client.py index 5aba7786ce4c..dc5bfa82b1f8 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub/client.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub/client.py @@ -630,36 +630,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. 
- """ - - default_universe = ApiHubClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -669,13 +639,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or ApiHubClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_dependencies/client.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_dependencies/client.py index a0f477bdc990..64a45030eb07 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_dependencies/client.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_dependencies/client.py @@ -488,36 +488,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. 
- credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = ApiHubDependenciesClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -527,13 +497,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or ApiHubDependenciesClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. 
+ return True @property def api_endpoint(self): diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_plugin/client.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_plugin/client.py index 4a266b8071d9..f4cebeb8cb2f 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_plugin/client.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_plugin/client.py @@ -479,36 +479,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = ApiHubPluginClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -518,13 +488,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. 
""" - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or ApiHubPluginClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/host_project_registration_service/client.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/host_project_registration_service/client.py index 1aaa11b52562..e977d349f283 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/host_project_registration_service/client.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/host_project_registration_service/client.py @@ -470,36 +470,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = HostProjectRegistrationServiceClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). 
" - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -509,13 +479,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or HostProjectRegistrationServiceClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/linting_service/client.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/linting_service/client.py index 418f3bdac4cc..6e65e8a84af9 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/linting_service/client.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/linting_service/client.py @@ -486,36 +486,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. 
- """ - - default_universe = LintingServiceClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -525,13 +495,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or LintingServiceClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/provisioning/client.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/provisioning/client.py index 187144d07e16..3ac25e6aa355 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/provisioning/client.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/provisioning/client.py @@ -462,36 +462,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. 
- - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = ProvisioningClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -501,13 +471,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or ProvisioningClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. 
+ return True @property def api_endpoint(self): diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/runtime_project_attachment_service/client.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/runtime_project_attachment_service/client.py index 940a4ce73410..16f69c687758 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/runtime_project_attachment_service/client.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/runtime_project_attachment_service/client.py @@ -470,36 +470,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = RuntimeProjectAttachmentServiceClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -509,13 +479,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. 
""" - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or RuntimeProjectAttachmentServiceClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-apihub/samples/generated_samples/snippet_metadata_google.cloud.apihub.v1.json b/packages/google-cloud-apihub/samples/generated_samples/snippet_metadata_google.cloud.apihub.v1.json index 54afd714fa78..208b34aaaa8e 100644 --- a/packages/google-cloud-apihub/samples/generated_samples/snippet_metadata_google.cloud.apihub.v1.json +++ b/packages/google-cloud-apihub/samples/generated_samples/snippet_metadata_google.cloud.apihub.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-apihub", - "version": "0.2.1" + "version": "0.2.2" }, "snippets": [ { diff --git a/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_api_hub.py b/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_api_hub.py index 5a2dc217445b..8557fbc9ac5f 100644 --- a/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_api_hub.py +++ b/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_api_hub.py @@ -276,85 +276,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (ApiHubClient, transports.ApiHubRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. 
- assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. 
- # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_api_hub_dependencies.py b/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_api_hub_dependencies.py index 96280c9c8baf..576d49cc9b83 100644 --- a/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_api_hub_dependencies.py +++ b/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_api_hub_dependencies.py @@ -329,85 +329,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
-@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (ApiHubDependenciesClient, transports.ApiHubDependenciesRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. 
- client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
- ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_api_hub_plugin.py b/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_api_hub_plugin.py index a7d41bc5fed0..51b127a5a5f7 100644 --- a/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_api_hub_plugin.py +++ b/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_api_hub_plugin.py @@ -298,85 +298,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (ApiHubPluginClient, transports.ApiHubPluginRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. 
- channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_host_project_registration_service.py b/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_host_project_registration_service.py index bdfcb57e6cf5..c5d1ab19ecec 100644 --- a/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_host_project_registration_service.py +++ b/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_host_project_registration_service.py @@ -347,89 +347,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
-@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - ( - HostProjectRegistrationServiceClient, - transports.HostProjectRegistrationServiceRestTransport, - "rest", - ), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. 
- client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
- ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_linting_service.py b/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_linting_service.py index ad5cd2e5419b..3dd2b4a666d9 100644 --- a/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_linting_service.py +++ b/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_linting_service.py @@ -314,85 +314,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (LintingServiceClient, transports.LintingServiceRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. 
- channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_provisioning.py b/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_provisioning.py index cdff93952823..aea58fb4b312 100644 --- a/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_provisioning.py +++ b/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_provisioning.py @@ -305,85 +305,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (ProvisioningClient, transports.ProvisioningRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. 
- assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. 
- # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_runtime_project_attachment_service.py b/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_runtime_project_attachment_service.py index accf183648a8..9ca7085c4847 100644 --- a/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_runtime_project_attachment_service.py +++ b/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_runtime_project_attachment_service.py @@ -351,89 +351,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
-@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - ( - RuntimeProjectAttachmentServiceClient, - transports.RuntimeProjectAttachmentServiceRestTransport, - "rest", - ), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. 
- client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
- ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-appengine-admin/CHANGELOG.md b/packages/google-cloud-appengine-admin/CHANGELOG.md index 1d2a74f14b43..8d5b90f8b6c5 100644 --- a/packages/google-cloud-appengine-admin/CHANGELOG.md +++ b/packages/google-cloud-appengine-admin/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [1.12.1](https://github.com/googleapis/google-cloud-python/compare/google-cloud-appengine-admin-v1.12.0...google-cloud-appengine-admin-v1.12.1) (2024-10-31) + + +### Bug Fixes + +* disable universe-domain validation ([85c7512](https://github.com/googleapis/google-cloud-python/commit/85c7512bbdde2b9cc60b4ad42b8c36c4558a07a5)) + ## [1.12.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-appengine-admin-v1.11.5...google-cloud-appengine-admin-v1.12.0) (2024-10-24) diff --git a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin/gapic_version.py b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin/gapic_version.py index 739fdfae141c..49ddc22ee702 100644 --- a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin/gapic_version.py +++ b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.12.0" # {x-release-please-version} +__version__ = "1.12.1" # {x-release-please-version} diff --git a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/gapic_version.py b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/gapic_version.py index 739fdfae141c..49ddc22ee702 100644 --- a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/gapic_version.py +++ b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.12.0" # {x-release-please-version} +__version__ = "1.12.1" # {x-release-please-version} diff --git a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/applications/client.py b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/applications/client.py index 5010433228ac..8c9c44b9389d 100644 --- a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/applications/client.py +++ b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/applications/client.py @@ -441,36 +441,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. 
- """ - - default_universe = ApplicationsClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -480,13 +450,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or ApplicationsClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/authorized_certificates/client.py b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/authorized_certificates/client.py index e2633508c5c4..fab20c6c3455 100644 --- a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/authorized_certificates/client.py +++ b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/authorized_certificates/client.py @@ -446,36 +446,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. 
- - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = AuthorizedCertificatesClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -485,13 +455,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or AuthorizedCertificatesClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. 
+ return True @property def api_endpoint(self): diff --git a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/authorized_domains/client.py b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/authorized_domains/client.py index 64df5a7c08c9..c020391620e7 100644 --- a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/authorized_domains/client.py +++ b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/authorized_domains/client.py @@ -442,36 +442,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = AuthorizedDomainsClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -481,13 +451,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. 
""" - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or AuthorizedDomainsClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/domain_mappings/client.py b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/domain_mappings/client.py index cfda1bd8fedb..7d180628344e 100644 --- a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/domain_mappings/client.py +++ b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/domain_mappings/client.py @@ -444,36 +444,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = DomainMappingsClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). 
" - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -483,13 +453,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or DomainMappingsClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/firewall/client.py b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/firewall/client.py index ac94c119d5e5..0dcffd4dc2eb 100644 --- a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/firewall/client.py +++ b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/firewall/client.py @@ -449,36 +449,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. 
- """ - - default_universe = FirewallClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -488,13 +458,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or FirewallClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/instances/client.py b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/instances/client.py index cb551f26c8f2..7463d2db4d74 100644 --- a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/instances/client.py +++ b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/instances/client.py @@ -467,36 +467,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. 
- credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = InstancesClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -506,13 +476,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or InstancesClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. 
+ return True @property def api_endpoint(self): diff --git a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/services/client.py b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/services/client.py index a4440b5042fe..2ecb6e192e10 100644 --- a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/services/client.py +++ b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/services/client.py @@ -443,36 +443,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = ServicesClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -482,13 +452,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. 
""" - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or ServicesClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/versions/client.py b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/versions/client.py index 20dfb5b039de..bdee4bba3e16 100644 --- a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/versions/client.py +++ b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/versions/client.py @@ -445,36 +445,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = VersionsClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." 
- ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -484,13 +454,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or VersionsClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-appengine-admin/samples/generated_samples/snippet_metadata_google.appengine.v1.json b/packages/google-cloud-appengine-admin/samples/generated_samples/snippet_metadata_google.appengine.v1.json index ed1da0c26d00..81c672284b63 100644 --- a/packages/google-cloud-appengine-admin/samples/generated_samples/snippet_metadata_google.appengine.v1.json +++ b/packages/google-cloud-appengine-admin/samples/generated_samples/snippet_metadata_google.appengine.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-appengine-admin", - "version": "1.12.0" + "version": "1.12.1" }, "snippets": [ { diff --git a/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_applications.py b/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_applications.py index 2b0a56308357..b2b59b82e43c 100644 --- a/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_applications.py +++ b/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_applications.py @@ -315,86 +315,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
-@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (ApplicationsClient, transports.ApplicationsGrpcTransport, "grpc"), - (ApplicationsClient, transports.ApplicationsRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. 
- google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
- ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_authorized_certificates.py b/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_authorized_certificates.py index 416cae547ed4..6259710fa02c 100644 --- a/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_authorized_certificates.py +++ b/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_authorized_certificates.py @@ -337,94 +337,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - ( - AuthorizedCertificatesClient, - transports.AuthorizedCertificatesGrpcTransport, - "grpc", - ), - ( - AuthorizedCertificatesClient, - transports.AuthorizedCertificatesRestTransport, - "rest", - ), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. 
a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_authorized_domains.py b/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_authorized_domains.py index 35496281ee94..60f3bb84d664 100644 --- a/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_authorized_domains.py +++ b/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_authorized_domains.py @@ -327,86 +327,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
-@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (AuthorizedDomainsClient, transports.AuthorizedDomainsGrpcTransport, "grpc"), - (AuthorizedDomainsClient, transports.AuthorizedDomainsRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. 
- google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
- ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_domain_mappings.py b/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_domain_mappings.py index a9e2c6797157..6cefee35a9e5 100644 --- a/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_domain_mappings.py +++ b/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_domain_mappings.py @@ -331,86 +331,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (DomainMappingsClient, transports.DomainMappingsGrpcTransport, "grpc"), - (DomainMappingsClient, transports.DomainMappingsRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. 
Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_firewall.py b/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_firewall.py index 5ab189956a41..44e92176cf49 100644 --- a/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_firewall.py +++ b/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_firewall.py @@ -291,86 +291,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
-@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (FirewallClient, transports.FirewallGrpcTransport, "grpc"), - (FirewallClient, transports.FirewallRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. 
- client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
- ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_instances.py b/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_instances.py index fa833efec1f6..7abe36da166f 100644 --- a/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_instances.py +++ b/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_instances.py @@ -306,86 +306,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (InstancesClient, transports.InstancesGrpcTransport, "grpc"), - (InstancesClient, transports.InstancesRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. 
- channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_services.py b/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_services.py index 90e7a55b0a3b..6517ea9f2963 100644 --- a/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_services.py +++ b/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_services.py @@ -304,86 +304,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
-@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (ServicesClient, transports.ServicesGrpcTransport, "grpc"), - (ServicesClient, transports.ServicesRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. 
- client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
- ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_versions.py b/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_versions.py index 99b8d859ab3e..1a379fbd916d 100644 --- a/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_versions.py +++ b/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_versions.py @@ -306,86 +306,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (VersionsClient, transports.VersionsGrpcTransport, "grpc"), - (VersionsClient, transports.VersionsRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. 
- channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-apphub/CHANGELOG.md b/packages/google-cloud-apphub/CHANGELOG.md index f5ab5c892704..d3ab72407619 100644 --- a/packages/google-cloud-apphub/CHANGELOG.md +++ b/packages/google-cloud-apphub/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [0.1.4](https://github.com/googleapis/google-cloud-python/compare/google-cloud-apphub-v0.1.3...google-cloud-apphub-v0.1.4) (2024-10-31) + + +### Bug Fixes + +* disable universe-domain validation ([85c7512](https://github.com/googleapis/google-cloud-python/commit/85c7512bbdde2b9cc60b4ad42b8c36c4558a07a5)) + ## [0.1.3](https://github.com/googleapis/google-cloud-python/compare/google-cloud-apphub-v0.1.2...google-cloud-apphub-v0.1.3) (2024-10-24) diff --git a/packages/google-cloud-apphub/google/cloud/apphub/gapic_version.py b/packages/google-cloud-apphub/google/cloud/apphub/gapic_version.py index 114e40645800..937ede8823ef 100644 --- a/packages/google-cloud-apphub/google/cloud/apphub/gapic_version.py +++ 
b/packages/google-cloud-apphub/google/cloud/apphub/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.3" # {x-release-please-version} +__version__ = "0.1.4" # {x-release-please-version} diff --git a/packages/google-cloud-apphub/google/cloud/apphub_v1/gapic_version.py b/packages/google-cloud-apphub/google/cloud/apphub_v1/gapic_version.py index 114e40645800..937ede8823ef 100644 --- a/packages/google-cloud-apphub/google/cloud/apphub_v1/gapic_version.py +++ b/packages/google-cloud-apphub/google/cloud/apphub_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.3" # {x-release-please-version} +__version__ = "0.1.4" # {x-release-please-version} diff --git a/packages/google-cloud-apphub/google/cloud/apphub_v1/services/app_hub/client.py b/packages/google-cloud-apphub/google/cloud/apphub_v1/services/app_hub/client.py index cffae475bf73..9e1aef923d80 100644 --- a/packages/google-cloud-apphub/google/cloud/apphub_v1/services/app_hub/client.py +++ b/packages/google-cloud-apphub/google/cloud/apphub_v1/services/app_hub/client.py @@ -596,36 +596,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. 
- """ - - default_universe = AppHubClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -635,13 +605,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or AppHubClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. 
+ return True @property def api_endpoint(self): diff --git a/packages/google-cloud-apphub/samples/generated_samples/snippet_metadata_google.cloud.apphub.v1.json b/packages/google-cloud-apphub/samples/generated_samples/snippet_metadata_google.cloud.apphub.v1.json index 2d91af205a2d..7f54a8e2552a 100644 --- a/packages/google-cloud-apphub/samples/generated_samples/snippet_metadata_google.cloud.apphub.v1.json +++ b/packages/google-cloud-apphub/samples/generated_samples/snippet_metadata_google.cloud.apphub.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-apphub", - "version": "0.1.3" + "version": "0.1.4" }, "snippets": [ { diff --git a/packages/google-cloud-apphub/tests/unit/gapic/apphub_v1/test_app_hub.py b/packages/google-cloud-apphub/tests/unit/gapic/apphub_v1/test_app_hub.py index d40134958924..cca807929893 100644 --- a/packages/google-cloud-apphub/tests/unit/gapic/apphub_v1/test_app_hub.py +++ b/packages/google-cloud-apphub/tests/unit/gapic/apphub_v1/test_app_hub.py @@ -310,86 +310,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (AppHubClient, transports.AppHubGrpcTransport, "grpc"), - (AppHubClient, transports.AppHubRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. 
- channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-artifact-registry/CHANGELOG.md b/packages/google-cloud-artifact-registry/CHANGELOG.md index aac2f8b782b1..501c7eff56a6 100644 --- a/packages/google-cloud-artifact-registry/CHANGELOG.md +++ b/packages/google-cloud-artifact-registry/CHANGELOG.md @@ -1,5 +1,29 @@ # Changelog +## [1.13.1](https://github.com/googleapis/google-cloud-python/compare/google-cloud-artifact-registry-v1.13.0...google-cloud-artifact-registry-v1.13.1) (2024-11-11) + + +### Bug Fixes + +* disable universe-domain validation ([#13242](https://github.com/googleapis/google-cloud-python/issues/13242)) ([b479ff8](https://github.com/googleapis/google-cloud-python/commit/b479ff841ed93a18393a188ee1d72edf9fb729ec)) + +## [1.13.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-artifact-registry-v1.12.0...google-cloud-artifact-registry-v1.13.0) (2024-10-28) + + +### Features + +* Add Artifact Registry attachment API 
([c2d1df7](https://github.com/googleapis/google-cloud-python/commit/c2d1df74b7284f96ab60be091ae8d8139dd777c6)) +* Add Artifact Registry custom remote support ([c2d1df7](https://github.com/googleapis/google-cloud-python/commit/c2d1df74b7284f96ab60be091ae8d8139dd777c6)) +* Add Artifact Registry generic repository support ([c2d1df7](https://github.com/googleapis/google-cloud-python/commit/c2d1df74b7284f96ab60be091ae8d8139dd777c6)) +* Add Artifact Registry rule APIs ([c2d1df7](https://github.com/googleapis/google-cloud-python/commit/c2d1df74b7284f96ab60be091ae8d8139dd777c6)) +* Add Artifact Registry server side resource filtering and sorting ([c2d1df7](https://github.com/googleapis/google-cloud-python/commit/c2d1df74b7284f96ab60be091ae8d8139dd777c6)) +* Add Artifact Registry UpdateFile and DeleteFile APIs ([c2d1df7](https://github.com/googleapis/google-cloud-python/commit/c2d1df74b7284f96ab60be091ae8d8139dd777c6)) + + +### Documentation + +* Include max page size for all Artifact Registry APIs ([c2d1df7](https://github.com/googleapis/google-cloud-python/commit/c2d1df74b7284f96ab60be091ae8d8139dd777c6)) + ## [1.12.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-artifact-registry-v1.11.5...google-cloud-artifact-registry-v1.12.0) (2024-10-24) diff --git a/packages/google-cloud-artifact-registry/google/cloud/artifactregistry/__init__.py b/packages/google-cloud-artifact-registry/google/cloud/artifactregistry/__init__.py index 9a67e293611e..7baef65b58aa 100644 --- a/packages/google-cloud-artifact-registry/google/cloud/artifactregistry/__init__.py +++ b/packages/google-cloud-artifact-registry/google/cloud/artifactregistry/__init__.py @@ -50,19 +50,31 @@ NpmPackage, PythonPackage, ) +from google.cloud.artifactregistry_v1.types.attachment import ( + Attachment, + 
CreateAttachmentRequest, + DeleteAttachmentRequest, + GetAttachmentRequest, + ListAttachmentsRequest, + ListAttachmentsResponse, +) from google.cloud.artifactregistry_v1.types.file import ( + DeleteFileRequest, File, GetFileRequest, Hash, ListFilesRequest, ListFilesResponse, + UpdateFileRequest, ) +from google.cloud.artifactregistry_v1.types.generic import GenericArtifact from google.cloud.artifactregistry_v1.types.package import ( DeletePackageRequest, GetPackageRequest, ListPackagesRequest, ListPackagesResponse, Package, + UpdatePackageRequest, ) from google.cloud.artifactregistry_v1.types.repository import ( CleanupPolicy, @@ -79,6 +91,15 @@ UpstreamPolicy, VirtualRepositoryConfig, ) +from google.cloud.artifactregistry_v1.types.rule import ( + CreateRuleRequest, + DeleteRuleRequest, + GetRuleRequest, + ListRulesRequest, + ListRulesResponse, + Rule, + UpdateRuleRequest, +) from google.cloud.artifactregistry_v1.types.service import OperationMetadata from google.cloud.artifactregistry_v1.types.settings import ( GetProjectSettingsRequest, @@ -101,6 +122,7 @@ GetVersionRequest, ListVersionsRequest, ListVersionsResponse, + UpdateVersionRequest, Version, VersionView, ) @@ -143,16 +165,26 @@ "MavenArtifact", "NpmPackage", "PythonPackage", + "Attachment", + "CreateAttachmentRequest", + "DeleteAttachmentRequest", + "GetAttachmentRequest", + "ListAttachmentsRequest", + "ListAttachmentsResponse", + "DeleteFileRequest", "File", "GetFileRequest", "Hash", "ListFilesRequest", "ListFilesResponse", + "UpdateFileRequest", + "GenericArtifact", "DeletePackageRequest", "GetPackageRequest", "ListPackagesRequest", "ListPackagesResponse", "Package", + "UpdatePackageRequest", "CleanupPolicy", "CleanupPolicyCondition", "CleanupPolicyMostRecentVersions", @@ -166,6 +198,13 @@ "UpdateRepositoryRequest", "UpstreamPolicy", "VirtualRepositoryConfig", + "CreateRuleRequest", + "DeleteRuleRequest", + "GetRuleRequest", + "ListRulesRequest", + "ListRulesResponse", + "Rule", + "UpdateRuleRequest", 
"OperationMetadata", "GetProjectSettingsRequest", "ProjectSettings", @@ -183,6 +222,7 @@ "GetVersionRequest", "ListVersionsRequest", "ListVersionsResponse", + "UpdateVersionRequest", "Version", "VersionView", "GetVPCSCConfigRequest", diff --git a/packages/google-cloud-artifact-registry/google/cloud/artifactregistry/gapic_version.py b/packages/google-cloud-artifact-registry/google/cloud/artifactregistry/gapic_version.py index 739fdfae141c..0b9427f4e8a5 100644 --- a/packages/google-cloud-artifact-registry/google/cloud/artifactregistry/gapic_version.py +++ b/packages/google-cloud-artifact-registry/google/cloud/artifactregistry/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.12.0" # {x-release-please-version} +__version__ = "1.13.1" # {x-release-please-version} diff --git a/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/__init__.py b/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/__init__.py index e82fc91fac52..68ea6508ff88 100644 --- a/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/__init__.py +++ b/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/__init__.py @@ -48,13 +48,31 @@ NpmPackage, PythonPackage, ) -from .types.file import File, GetFileRequest, Hash, ListFilesRequest, ListFilesResponse +from .types.attachment import ( + Attachment, + CreateAttachmentRequest, + DeleteAttachmentRequest, + GetAttachmentRequest, + ListAttachmentsRequest, + ListAttachmentsResponse, +) +from .types.file import ( + DeleteFileRequest, + File, + GetFileRequest, + Hash, + ListFilesRequest, + ListFilesResponse, + UpdateFileRequest, +) +from .types.generic import GenericArtifact from .types.package import ( DeletePackageRequest, GetPackageRequest, ListPackagesRequest, ListPackagesResponse, Package, + UpdatePackageRequest, ) from .types.repository import ( CleanupPolicy, @@ -71,6 
+89,15 @@ UpstreamPolicy, VirtualRepositoryConfig, ) +from .types.rule import ( + CreateRuleRequest, + DeleteRuleRequest, + GetRuleRequest, + ListRulesRequest, + ListRulesResponse, + Rule, + UpdateRuleRequest, +) from .types.service import OperationMetadata from .types.settings import ( GetProjectSettingsRequest, @@ -93,6 +120,7 @@ GetVersionRequest, ListVersionsRequest, ListVersionsResponse, + UpdateVersionRequest, Version, VersionView, ) @@ -114,19 +142,27 @@ "ArtifactRegistryAsyncClient", "AptArtifact", "ArtifactRegistryClient", + "Attachment", "BatchDeleteVersionsMetadata", "BatchDeleteVersionsRequest", "CleanupPolicy", "CleanupPolicyCondition", "CleanupPolicyMostRecentVersions", + "CreateAttachmentRequest", "CreateRepositoryRequest", + "CreateRuleRequest", "CreateTagRequest", + "DeleteAttachmentRequest", + "DeleteFileRequest", "DeletePackageRequest", "DeleteRepositoryRequest", + "DeleteRuleRequest", "DeleteTagRequest", "DeleteVersionRequest", "DockerImage", "File", + "GenericArtifact", + "GetAttachmentRequest", "GetDockerImageRequest", "GetFileRequest", "GetMavenArtifactRequest", @@ -135,6 +171,7 @@ "GetProjectSettingsRequest", "GetPythonPackageRequest", "GetRepositoryRequest", + "GetRuleRequest", "GetTagRequest", "GetVPCSCConfigRequest", "GetVersionRequest", @@ -149,6 +186,8 @@ "ImportYumArtifactsMetadata", "ImportYumArtifactsRequest", "ImportYumArtifactsResponse", + "ListAttachmentsRequest", + "ListAttachmentsResponse", "ListDockerImagesRequest", "ListDockerImagesResponse", "ListFilesRequest", @@ -163,6 +202,8 @@ "ListPythonPackagesResponse", "ListRepositoriesRequest", "ListRepositoriesResponse", + "ListRulesRequest", + "ListRulesResponse", "ListTagsRequest", "ListTagsResponse", "ListVersionsRequest", @@ -175,11 +216,16 @@ "PythonPackage", "RemoteRepositoryConfig", "Repository", + "Rule", "Tag", + "UpdateFileRequest", + "UpdatePackageRequest", "UpdateProjectSettingsRequest", "UpdateRepositoryRequest", + "UpdateRuleRequest", "UpdateTagRequest", 
"UpdateVPCSCConfigRequest", + "UpdateVersionRequest", "UpstreamPolicy", "VPCSCConfig", "Version", diff --git a/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/gapic_metadata.json b/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/gapic_metadata.json index 2f151d61754f..7824e41e78fd 100644 --- a/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/gapic_metadata.json +++ b/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/gapic_metadata.json @@ -15,16 +15,36 @@ "batch_delete_versions" ] }, + "CreateAttachment": { + "methods": [ + "create_attachment" + ] + }, "CreateRepository": { "methods": [ "create_repository" ] }, + "CreateRule": { + "methods": [ + "create_rule" + ] + }, "CreateTag": { "methods": [ "create_tag" ] }, + "DeleteAttachment": { + "methods": [ + "delete_attachment" + ] + }, + "DeleteFile": { + "methods": [ + "delete_file" + ] + }, "DeletePackage": { "methods": [ "delete_package" @@ -35,6 +55,11 @@ "delete_repository" ] }, + "DeleteRule": { + "methods": [ + "delete_rule" + ] + }, "DeleteTag": { "methods": [ "delete_tag" @@ -45,6 +70,11 @@ "delete_version" ] }, + "GetAttachment": { + "methods": [ + "get_attachment" + ] + }, "GetDockerImage": { "methods": [ "get_docker_image" @@ -90,6 +120,11 @@ "get_repository" ] }, + "GetRule": { + "methods": [ + "get_rule" + ] + }, "GetTag": { "methods": [ "get_tag" @@ -115,6 +150,11 @@ "import_yum_artifacts" ] }, + "ListAttachments": { + "methods": [ + "list_attachments" + ] + }, "ListDockerImages": { "methods": [ "list_docker_images" @@ -150,6 +190,11 @@ "list_repositories" ] }, + "ListRules": { + "methods": [ + "list_rules" + ] + }, "ListTags": { "methods": [ "list_tags" @@ -170,6 +215,16 @@ "test_iam_permissions" ] }, + "UpdateFile": { + "methods": [ + "update_file" + ] + }, + "UpdatePackage": { + "methods": [ + "update_package" + ] + }, "UpdateProjectSettings": { "methods": [ "update_project_settings" @@ -180,6 +235,11 @@ 
"update_repository" ] }, + "UpdateRule": { + "methods": [ + "update_rule" + ] + }, "UpdateTag": { "methods": [ "update_tag" @@ -189,6 +249,11 @@ "methods": [ "update_vpcsc_config" ] + }, + "UpdateVersion": { + "methods": [ + "update_version" + ] } } }, @@ -200,16 +265,36 @@ "batch_delete_versions" ] }, + "CreateAttachment": { + "methods": [ + "create_attachment" + ] + }, "CreateRepository": { "methods": [ "create_repository" ] }, + "CreateRule": { + "methods": [ + "create_rule" + ] + }, "CreateTag": { "methods": [ "create_tag" ] }, + "DeleteAttachment": { + "methods": [ + "delete_attachment" + ] + }, + "DeleteFile": { + "methods": [ + "delete_file" + ] + }, "DeletePackage": { "methods": [ "delete_package" @@ -220,6 +305,11 @@ "delete_repository" ] }, + "DeleteRule": { + "methods": [ + "delete_rule" + ] + }, "DeleteTag": { "methods": [ "delete_tag" @@ -230,6 +320,11 @@ "delete_version" ] }, + "GetAttachment": { + "methods": [ + "get_attachment" + ] + }, "GetDockerImage": { "methods": [ "get_docker_image" @@ -275,6 +370,11 @@ "get_repository" ] }, + "GetRule": { + "methods": [ + "get_rule" + ] + }, "GetTag": { "methods": [ "get_tag" @@ -300,6 +400,11 @@ "import_yum_artifacts" ] }, + "ListAttachments": { + "methods": [ + "list_attachments" + ] + }, "ListDockerImages": { "methods": [ "list_docker_images" @@ -335,6 +440,11 @@ "list_repositories" ] }, + "ListRules": { + "methods": [ + "list_rules" + ] + }, "ListTags": { "methods": [ "list_tags" @@ -355,6 +465,16 @@ "test_iam_permissions" ] }, + "UpdateFile": { + "methods": [ + "update_file" + ] + }, + "UpdatePackage": { + "methods": [ + "update_package" + ] + }, "UpdateProjectSettings": { "methods": [ "update_project_settings" @@ -365,6 +485,11 @@ "update_repository" ] }, + "UpdateRule": { + "methods": [ + "update_rule" + ] + }, "UpdateTag": { "methods": [ "update_tag" @@ -374,6 +499,11 @@ "methods": [ "update_vpcsc_config" ] + }, + "UpdateVersion": { + "methods": [ + "update_version" + ] } } }, @@ -385,16 +515,36 @@ 
"batch_delete_versions" ] }, + "CreateAttachment": { + "methods": [ + "create_attachment" + ] + }, "CreateRepository": { "methods": [ "create_repository" ] }, + "CreateRule": { + "methods": [ + "create_rule" + ] + }, "CreateTag": { "methods": [ "create_tag" ] }, + "DeleteAttachment": { + "methods": [ + "delete_attachment" + ] + }, + "DeleteFile": { + "methods": [ + "delete_file" + ] + }, "DeletePackage": { "methods": [ "delete_package" @@ -405,6 +555,11 @@ "delete_repository" ] }, + "DeleteRule": { + "methods": [ + "delete_rule" + ] + }, "DeleteTag": { "methods": [ "delete_tag" @@ -415,6 +570,11 @@ "delete_version" ] }, + "GetAttachment": { + "methods": [ + "get_attachment" + ] + }, "GetDockerImage": { "methods": [ "get_docker_image" @@ -460,6 +620,11 @@ "get_repository" ] }, + "GetRule": { + "methods": [ + "get_rule" + ] + }, "GetTag": { "methods": [ "get_tag" @@ -485,6 +650,11 @@ "import_yum_artifacts" ] }, + "ListAttachments": { + "methods": [ + "list_attachments" + ] + }, "ListDockerImages": { "methods": [ "list_docker_images" @@ -520,6 +690,11 @@ "list_repositories" ] }, + "ListRules": { + "methods": [ + "list_rules" + ] + }, "ListTags": { "methods": [ "list_tags" @@ -540,6 +715,16 @@ "test_iam_permissions" ] }, + "UpdateFile": { + "methods": [ + "update_file" + ] + }, + "UpdatePackage": { + "methods": [ + "update_package" + ] + }, "UpdateProjectSettings": { "methods": [ "update_project_settings" @@ -550,6 +735,11 @@ "update_repository" ] }, + "UpdateRule": { + "methods": [ + "update_rule" + ] + }, "UpdateTag": { "methods": [ "update_tag" @@ -559,6 +749,11 @@ "methods": [ "update_vpcsc_config" ] + }, + "UpdateVersion": { + "methods": [ + "update_version" + ] } } } diff --git a/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/gapic_version.py b/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/gapic_version.py index 739fdfae141c..0b9427f4e8a5 100644 --- 
a/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/gapic_version.py +++ b/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.12.0" # {x-release-please-version} +__version__ = "1.13.1" # {x-release-please-version} diff --git a/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/services/artifact_registry/async_client.py b/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/services/artifact_registry/async_client.py index d94e4e91b50d..08f7cb81309b 100644 --- a/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/services/artifact_registry/async_client.py +++ b/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/services/artifact_registry/async_client.py @@ -52,16 +52,26 @@ from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import struct_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore +from google.type import expr_pb2 # type: ignore from google.cloud.artifactregistry_v1.services.artifact_registry import pagers -from google.cloud.artifactregistry_v1.types import apt_artifact, artifact, file, package from google.cloud.artifactregistry_v1.types import vpcsc_config as gda_vpcsc_config +from google.cloud.artifactregistry_v1.types import apt_artifact, artifact +from google.cloud.artifactregistry_v1.types import attachment +from google.cloud.artifactregistry_v1.types import attachment as gda_attachment +from google.cloud.artifactregistry_v1.types import file +from google.cloud.artifactregistry_v1.types import file as gda_file +from google.cloud.artifactregistry_v1.types import package +from google.cloud.artifactregistry_v1.types import package as gda_package from google.cloud.artifactregistry_v1.types import repository from 
google.cloud.artifactregistry_v1.types import repository as gda_repository +from google.cloud.artifactregistry_v1.types import rule +from google.cloud.artifactregistry_v1.types import rule as gda_rule from google.cloud.artifactregistry_v1.types import service, settings from google.cloud.artifactregistry_v1.types import tag from google.cloud.artifactregistry_v1.types import tag as gda_tag from google.cloud.artifactregistry_v1.types import version +from google.cloud.artifactregistry_v1.types import version as gda_version from google.cloud.artifactregistry_v1.types import vpcsc_config from google.cloud.artifactregistry_v1.types import yum_artifact @@ -99,6 +109,8 @@ class ArtifactRegistryAsyncClient: parse_apt_artifact_path = staticmethod( ArtifactRegistryClient.parse_apt_artifact_path ) + attachment_path = staticmethod(ArtifactRegistryClient.attachment_path) + parse_attachment_path = staticmethod(ArtifactRegistryClient.parse_attachment_path) docker_image_path = staticmethod(ArtifactRegistryClient.docker_image_path) parse_docker_image_path = staticmethod( ArtifactRegistryClient.parse_docker_image_path @@ -123,6 +135,8 @@ class ArtifactRegistryAsyncClient: ) repository_path = staticmethod(ArtifactRegistryClient.repository_path) parse_repository_path = staticmethod(ArtifactRegistryClient.parse_repository_path) + rule_path = staticmethod(ArtifactRegistryClient.rule_path) + parse_rule_path = staticmethod(ArtifactRegistryClient.parse_rule_path) secret_version_path = staticmethod(ArtifactRegistryClient.secret_version_path) parse_secret_version_path = staticmethod( ArtifactRegistryClient.parse_secret_version_path @@ -2906,6 +2920,126 @@ async def sample_batch_delete_versions(): # Done; return the response. 
return response + async def update_version( + self, + request: Optional[Union[gda_version.UpdateVersionRequest, dict]] = None, + *, + version: Optional[gda_version.Version] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gda_version.Version: + r"""Updates a version. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import artifactregistry_v1 + + async def sample_update_version(): + # Create a client + client = artifactregistry_v1.ArtifactRegistryAsyncClient() + + # Initialize request argument(s) + request = artifactregistry_v1.UpdateVersionRequest( + ) + + # Make the request + response = await client.update_version(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.artifactregistry_v1.types.UpdateVersionRequest, dict]]): + The request object. The request to update a version. + version (:class:`google.cloud.artifactregistry_v1.types.Version`): + Required. The Version that replaces + the resource on the server. + + This corresponds to the ``version`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + The update mask applies to the resource. 
For the + ``FieldMask`` definition, see + https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.artifactregistry_v1.types.Version: + The body of a version resource. A + version resource represents a collection + of components, such as files and other + data. This may correspond to a version + in many package management schemes. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([version, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, gda_version.UpdateVersionRequest): + request = gda_version.UpdateVersionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if version is not None: + request.version = version + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_version + ] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("version.name", request.version.name),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + async def list_files( self, request: Optional[Union[file.ListFilesRequest, dict]] = None, @@ -3131,16 +3265,18 @@ async def sample_get_file(): # Done; return the response. return response - async def list_tags( + async def delete_file( self, - request: Optional[Union[tag.ListTagsRequest, dict]] = None, + request: Optional[Union[file.DeleteFileRequest, dict]] = None, *, - parent: Optional[str] = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListTagsAsyncPager: - r"""Lists tags. + ) -> operation_async.AsyncOperation: + r"""Deletes a file and all of its content. It is only + allowed on generic repositories. The returned operation + will complete once the file has been deleted. .. 
code-block:: python @@ -3153,30 +3289,33 @@ async def list_tags( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import artifactregistry_v1 - async def sample_list_tags(): + async def sample_delete_file(): # Create a client client = artifactregistry_v1.ArtifactRegistryAsyncClient() # Initialize request argument(s) - request = artifactregistry_v1.ListTagsRequest( + request = artifactregistry_v1.DeleteFileRequest( + name="name_value", ) # Make the request - page_result = client.list_tags(request=request) + operation = client.delete_file(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() # Handle the response - async for response in page_result: - print(response) + print(response) Args: - request (Optional[Union[google.cloud.artifactregistry_v1.types.ListTagsRequest, dict]]): - The request object. The request to list tags. - parent (:class:`str`): - The name of the parent package whose tags will be - listed. For example: - ``projects/p1/locations/us-central1/repositories/repo1/packages/pkg1``. + request (Optional[Union[google.cloud.artifactregistry_v1.types.DeleteFileRequest, dict]]): + The request object. The request to delete a file. + name (:class:`str`): + Required. The name of the file to + delete. - This corresponds to the ``parent`` field + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, @@ -3186,18 +3325,25 @@ async def sample_list_tags(): sent along with the request as metadata. Returns: - google.cloud.artifactregistry_v1.services.artifact_registry.pagers.ListTagsAsyncPager: - The response from listing tags. + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. 
- Iterating over this object will yield - results and resolve additional pages - automatically. + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -3206,24 +3352,24 @@ async def sample_list_tags(): # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, tag.ListTagsRequest): - request = tag.ListTagsRequest(request) + if not isinstance(request, file.DeleteFileRequest): + request = file.DeleteFileRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: - request.parent = parent + if name is not None: + request.name = name # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._client._transport._wrapped_methods[ - self._client._transport.list_tags + self._client._transport.delete_file ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. 
@@ -3237,30 +3383,28 @@ async def sample_list_tags(): metadata=metadata, ) - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListTagsAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=service.OperationMetadata, ) # Done; return the response. return response - async def get_tag( + async def update_file( self, - request: Optional[Union[tag.GetTagRequest, dict]] = None, + request: Optional[Union[gda_file.UpdateFileRequest, dict]] = None, *, - name: Optional[str] = None, + file: Optional[gda_file.File] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> tag.Tag: - r"""Gets a tag. + ) -> gda_file.File: + r"""Updates a file. .. code-block:: python @@ -3273,26 +3417,36 @@ async def get_tag( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import artifactregistry_v1 - async def sample_get_tag(): + async def sample_update_file(): # Create a client client = artifactregistry_v1.ArtifactRegistryAsyncClient() # Initialize request argument(s) - request = artifactregistry_v1.GetTagRequest( + request = artifactregistry_v1.UpdateFileRequest( ) # Make the request - response = await client.get_tag(request=request) + response = await client.update_file(request=request) # Handle the response print(response) Args: - request (Optional[Union[google.cloud.artifactregistry_v1.types.GetTagRequest, dict]]): - The request object. The request to retrieve a tag. - name (:class:`str`): - The name of the tag to retrieve. 
- This corresponds to the ``name`` field + request (Optional[Union[google.cloud.artifactregistry_v1.types.UpdateFileRequest, dict]]): + The request object. The request to update a file. + file (:class:`google.cloud.artifactregistry_v1.types.File`): + Required. The File that replaces the + resource on the server. + + This corresponds to the ``file`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. The update mask applies to the resource. For + the ``FieldMask`` definition, see + https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask + + This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, @@ -3302,16 +3456,16 @@ async def sample_get_tag(): sent along with the request as metadata. Returns: - google.cloud.artifactregistry_v1.types.Tag: - Tags point to a version and represent - an alternative name that can be used to - access the version. + google.cloud.artifactregistry_v1.types.File: + Files store content that is + potentially associated with Packages or + Versions. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + has_flattened_params = any([file, update_mask]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -3320,22 +3474,28 @@ async def sample_get_tag(): # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. 
- if not isinstance(request, tag.GetTagRequest): - request = tag.GetTagRequest(request) + if not isinstance(request, gda_file.UpdateFileRequest): + request = gda_file.UpdateFileRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: - request.name = name + if file is not None: + request.file = file + if update_mask is not None: + request.update_mask = update_mask # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_tag] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_file + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + gapic_v1.routing_header.to_grpc_metadata( + (("file.name", request.file.name),) + ), ) # Validate the universe domain. @@ -3352,18 +3512,16 @@ async def sample_get_tag(): # Done; return the response. return response - async def create_tag( + async def list_tags( self, - request: Optional[Union[gda_tag.CreateTagRequest, dict]] = None, + request: Optional[Union[tag.ListTagsRequest, dict]] = None, *, parent: Optional[str] = None, - tag: Optional[gda_tag.Tag] = None, - tag_id: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> gda_tag.Tag: - r"""Creates a tag. + ) -> pagers.ListTagsAsyncPager: + r"""Lists tags. .. 
code-block:: python @@ -3376,42 +3534,32 @@ async def create_tag( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import artifactregistry_v1 - async def sample_create_tag(): + async def sample_list_tags(): # Create a client client = artifactregistry_v1.ArtifactRegistryAsyncClient() # Initialize request argument(s) - request = artifactregistry_v1.CreateTagRequest( + request = artifactregistry_v1.ListTagsRequest( ) # Make the request - response = await client.create_tag(request=request) + page_result = client.list_tags(request=request) # Handle the response - print(response) + async for response in page_result: + print(response) Args: - request (Optional[Union[google.cloud.artifactregistry_v1.types.CreateTagRequest, dict]]): - The request object. The request to create a new tag. + request (Optional[Union[google.cloud.artifactregistry_v1.types.ListTagsRequest, dict]]): + The request object. The request to list tags. parent (:class:`str`): - The name of the parent resource where - the tag will be created. + The name of the parent package whose tags will be + listed. For example: + ``projects/p1/locations/us-central1/repositories/repo1/packages/pkg1``. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - tag (:class:`google.cloud.artifactregistry_v1.types.Tag`): - The tag to be created. - This corresponds to the ``tag`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - tag_id (:class:`str`): - The tag id to use for this - repository. - - This corresponds to the ``tag_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
@@ -3419,8 +3567,241 @@ async def sample_create_tag(): sent along with the request as metadata. Returns: - google.cloud.artifactregistry_v1.types.Tag: - Tags point to a version and represent + google.cloud.artifactregistry_v1.services.artifact_registry.pagers.ListTagsAsyncPager: + The response from listing tags. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, tag.ListTagsRequest): + request = tag.ListTagsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_tags + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. 
+ response = pagers.ListTagsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_tag( + self, + request: Optional[Union[tag.GetTagRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> tag.Tag: + r"""Gets a tag. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import artifactregistry_v1 + + async def sample_get_tag(): + # Create a client + client = artifactregistry_v1.ArtifactRegistryAsyncClient() + + # Initialize request argument(s) + request = artifactregistry_v1.GetTagRequest( + ) + + # Make the request + response = await client.get_tag(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.artifactregistry_v1.types.GetTagRequest, dict]]): + The request object. The request to retrieve a tag. + name (:class:`str`): + The name of the tag to retrieve. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.artifactregistry_v1.types.Tag: + Tags point to a version and represent + an alternative name that can be used to + access the version. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, tag.GetTagRequest): + request = tag.GetTagRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.get_tag] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_tag( + self, + request: Optional[Union[gda_tag.CreateTagRequest, dict]] = None, + *, + parent: Optional[str] = None, + tag: Optional[gda_tag.Tag] = None, + tag_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gda_tag.Tag: + r"""Creates a tag. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import artifactregistry_v1 + + async def sample_create_tag(): + # Create a client + client = artifactregistry_v1.ArtifactRegistryAsyncClient() + + # Initialize request argument(s) + request = artifactregistry_v1.CreateTagRequest( + ) + + # Make the request + response = await client.create_tag(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.artifactregistry_v1.types.CreateTagRequest, dict]]): + The request object. The request to create a new tag. + parent (:class:`str`): + The name of the parent resource where + the tag will be created. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + tag (:class:`google.cloud.artifactregistry_v1.types.Tag`): + The tag to be created. + This corresponds to the ``tag`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + tag_id (:class:`str`): + The tag id to use for this + repository. + + This corresponds to the ``tag_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.artifactregistry_v1.types.Tag: + Tags point to a version and represent an alternative name that can be used to access the version. @@ -3682,15 +4063,18 @@ async def sample_delete_tag(): metadata=metadata, ) - async def set_iam_policy( + async def create_rule( self, - request: Optional[Union[iam_policy_pb2.SetIamPolicyRequest, dict]] = None, + request: Optional[Union[gda_rule.CreateRuleRequest, dict]] = None, *, + parent: Optional[str] = None, + rule: Optional[gda_rule.Rule] = None, + rule_id: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> policy_pb2.Policy: - r"""Updates the IAM policy for a given resource. + ) -> gda_rule.Rule: + r"""Creates a rule. .. code-block:: python @@ -3702,84 +4086,1070 @@ async def set_iam_policy( # client as shown in: # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import artifactregistry_v1 - from google.iam.v1 import iam_policy_pb2 # type: ignore - async def sample_set_iam_policy(): + async def sample_create_rule(): # Create a client client = artifactregistry_v1.ArtifactRegistryAsyncClient() # Initialize request argument(s) - request = iam_policy_pb2.SetIamPolicyRequest( - resource="resource_value", + request = artifactregistry_v1.CreateRuleRequest( + parent="parent_value", ) # Make the request - response = await client.set_iam_policy(request=request) + response = await client.create_rule(request=request) # Handle the response print(response) Args: - request (Optional[Union[google.iam.v1.iam_policy_pb2.SetIamPolicyRequest, dict]]): - The request object. Request message for ``SetIamPolicy`` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + request (Optional[Union[google.cloud.artifactregistry_v1.types.CreateRuleRequest, dict]]): + The request object. The request to create a new rule. + parent (:class:`str`): + Required. The name of the parent + resource where the rule will be created. - Returns: + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + rule (:class:`google.cloud.artifactregistry_v1.types.Rule`): + The rule to be created. + This corresponds to the ``rule`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + rule_id (:class:`str`): + The rule id to use for this + repository. + + This corresponds to the ``rule_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.artifactregistry_v1.types.Rule: + A rule defines the deny or allow + action of the operation it applies to + and the conditions required for the rule + to apply. You can set one rule for an + entire repository and one rule for each + package within. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, rule, rule_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, gda_rule.CreateRuleRequest): + request = gda_rule.CreateRuleRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if rule is not None: + request.rule = rule + if rule_id is not None: + request.rule_id = rule_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_rule + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_rules( + self, + request: Optional[Union[rule.ListRulesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListRulesAsyncPager: + r"""Lists rules. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import artifactregistry_v1 + + async def sample_list_rules(): + # Create a client + client = artifactregistry_v1.ArtifactRegistryAsyncClient() + + # Initialize request argument(s) + request = artifactregistry_v1.ListRulesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_rules(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.artifactregistry_v1.types.ListRulesRequest, dict]]): + The request object. The request to list rules. + parent (:class:`str`): + Required. The name of the parent repository whose rules + will be listed. For example: + ``projects/p1/locations/us-central1/repositories/repo1``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.artifactregistry_v1.services.artifact_registry.pagers.ListRulesAsyncPager: + The response from listing rules. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, rule.ListRulesRequest): + request = rule.ListRulesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_rules + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListRulesAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_rule( + self, + request: Optional[Union[rule.GetRuleRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> rule.Rule: + r"""Gets a rule. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import artifactregistry_v1 + + async def sample_get_rule(): + # Create a client + client = artifactregistry_v1.ArtifactRegistryAsyncClient() + + # Initialize request argument(s) + request = artifactregistry_v1.GetRuleRequest( + name="name_value", + ) + + # Make the request + response = await client.get_rule(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.artifactregistry_v1.types.GetRuleRequest, dict]]): + The request object. The request to retrieve a rule. + name (:class:`str`): + Required. The name of the rule to + retrieve. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.artifactregistry_v1.types.Rule: + A rule defines the deny or allow + action of the operation it applies to + and the conditions required for the rule + to apply. You can set one rule for an + entire repository and one rule for each + package within. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, rule.GetRuleRequest): + request = rule.GetRuleRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.get_rule] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_rule( + self, + request: Optional[Union[gda_rule.UpdateRuleRequest, dict]] = None, + *, + rule: Optional[gda_rule.Rule] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gda_rule.Rule: + r"""Updates a rule. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import artifactregistry_v1 + + async def sample_update_rule(): + # Create a client + client = artifactregistry_v1.ArtifactRegistryAsyncClient() + + # Initialize request argument(s) + request = artifactregistry_v1.UpdateRuleRequest( + ) + + # Make the request + response = await client.update_rule(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.artifactregistry_v1.types.UpdateRuleRequest, dict]]): + The request object. The request to update a rule. + rule (:class:`google.cloud.artifactregistry_v1.types.Rule`): + The rule that replaces the resource + on the server. + + This corresponds to the ``rule`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + The update mask applies to the resource. For the + ``FieldMask`` definition, see + https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.artifactregistry_v1.types.Rule: + A rule defines the deny or allow + action of the operation it applies to + and the conditions required for the rule + to apply. You can set one rule for an + entire repository and one rule for each + package within. + + """ + # Create or coerce a protobuf request object. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([rule, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, gda_rule.UpdateRuleRequest): + request = gda_rule.UpdateRuleRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if rule is not None: + request.rule = rule + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_rule + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("rule.name", request.rule.name),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_rule( + self, + request: Optional[Union[rule.DeleteRuleRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a rule. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import artifactregistry_v1 + + async def sample_delete_rule(): + # Create a client + client = artifactregistry_v1.ArtifactRegistryAsyncClient() + + # Initialize request argument(s) + request = artifactregistry_v1.DeleteRuleRequest( + name="name_value", + ) + + # Make the request + await client.delete_rule(request=request) + + Args: + request (Optional[Union[google.cloud.artifactregistry_v1.types.DeleteRuleRequest, dict]]): + The request object. The request to delete a rule. + name (:class:`str`): + Required. The name of the rule to + delete. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, rule.DeleteRuleRequest): + request = rule.DeleteRuleRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_rule + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def set_iam_policy( + self, + request: Optional[Union[iam_policy_pb2.SetIamPolicyRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Updates the IAM policy for a given resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import artifactregistry_v1 + from google.iam.v1 import iam_policy_pb2 # type: ignore + + async def sample_set_iam_policy(): + # Create a client + client = artifactregistry_v1.ArtifactRegistryAsyncClient() + + # Initialize request argument(s) + request = iam_policy_pb2.SetIamPolicyRequest( + resource="resource_value", + ) + + # Make the request + response = await client.set_iam_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.iam.v1.iam_policy_pb2.SetIamPolicyRequest, dict]]): + The request object. Request message for ``SetIamPolicy`` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.iam.v1.policy_pb2.Policy: + An Identity and Access Management (IAM) policy, which specifies access + controls for Google Cloud resources. + + A Policy is a collection of bindings. A binding binds + one or more members, or principals, to a single role. + Principals can be user accounts, service accounts, + Google groups, and domains (such as G Suite). A role + is a named list of permissions; each role can be an + IAM predefined role or a user-created custom role. + + For some types of Google Cloud resources, a binding + can also specify a condition, which is a logical + expression that allows access to a resource only if + the expression evaluates to true. A condition can add + constraints based on attributes of the request, the + resource, or both. 
To learn which resources support + conditions in their IAM policies, see the [IAM + documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). + + **JSON example:** + + :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` + + **YAML example:** + + :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` + + For a description of IAM and its features, see the + [IAM + documentation](\ https://cloud.google.com/iam/docs/). + + """ + # Create or coerce a protobuf request object. + # - The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.SetIamPolicyRequest(**request) + elif not request: + request = iam_policy_pb2.SetIamPolicyRequest() + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._client._transport._wrapped_methods[ + self._client._transport.set_iam_policy + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_iam_policy( + self, + request: Optional[Union[iam_policy_pb2.GetIamPolicyRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Gets the IAM policy for a given resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import artifactregistry_v1 + from google.iam.v1 import iam_policy_pb2 # type: ignore + + async def sample_get_iam_policy(): + # Create a client + client = artifactregistry_v1.ArtifactRegistryAsyncClient() + + # Initialize request argument(s) + request = iam_policy_pb2.GetIamPolicyRequest( + resource="resource_value", + ) + + # Make the request + response = await client.get_iam_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.iam.v1.iam_policy_pb2.GetIamPolicyRequest, dict]]): + The request object. Request message for ``GetIamPolicy`` method. 
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: google.iam.v1.policy_pb2.Policy: An Identity and Access Management (IAM) policy, which specifies access controls for Google Cloud resources. - A Policy is a collection of bindings. A binding binds - one or more members, or principals, to a single role. - Principals can be user accounts, service accounts, - Google groups, and domains (such as G Suite). A role - is a named list of permissions; each role can be an - IAM predefined role or a user-created custom role. + A Policy is a collection of bindings. A binding binds + one or more members, or principals, to a single role. + Principals can be user accounts, service accounts, + Google groups, and domains (such as G Suite). A role + is a named list of permissions; each role can be an + IAM predefined role or a user-created custom role. + + For some types of Google Cloud resources, a binding + can also specify a condition, which is a logical + expression that allows access to a resource only if + the expression evaluates to true. A condition can add + constraints based on attributes of the request, the + resource, or both. To learn which resources support + conditions in their IAM policies, see the [IAM + documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). 
+ + **JSON example:** + + :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` + + **YAML example:** + + :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` + + For a description of IAM and its features, see the + [IAM + documentation](\ https://cloud.google.com/iam/docs/). + + """ + # Create or coerce a protobuf request object. + # - The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.GetIamPolicyRequest(**request) + elif not request: + request = iam_policy_pb2.GetIamPolicyRequest() + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_iam_policy + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Validate the universe domain. 
+ self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def test_iam_permissions( + self, + request: Optional[Union[iam_policy_pb2.TestIamPermissionsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Tests if the caller has a list of permissions on a + resource. + + .. code-block:: python - For some types of Google Cloud resources, a binding - can also specify a condition, which is a logical - expression that allows access to a resource only if - the expression evaluates to true. A condition can add - constraints based on attributes of the request, the - resource, or both. To learn which resources support - conditions in their IAM policies, see the [IAM - documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import artifactregistry_v1 + from google.iam.v1 import iam_policy_pb2 # type: ignore - **JSON example:** + async def sample_test_iam_permissions(): + # Create a client + client = artifactregistry_v1.ArtifactRegistryAsyncClient() - :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` + # Initialize request argument(s) + request = iam_policy_pb2.TestIamPermissionsRequest( + resource="resource_value", + permissions=['permissions_value1', 'permissions_value2'], + ) - **YAML example:** + # Make the request + response = await client.test_iam_permissions(request=request) - :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` + # Handle the response + print(response) - For a description of IAM and its features, see the - [IAM - documentation](\ https://cloud.google.com/iam/docs/). 
+ Args: + request (Optional[Union[google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest, dict]]): + The request object. Request message for ``TestIamPermissions`` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse: + Response message for TestIamPermissions method. + """ + # Create or coerce a protobuf request object. + # - The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.TestIamPermissionsRequest(**request) + elif not request: + request = iam_policy_pb2.TestIamPermissionsRequest() + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.test_iam_permissions + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_project_settings( + self, + request: Optional[Union[settings.GetProjectSettingsRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> settings.ProjectSettings: + r"""Retrieves the Settings for the Project. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import artifactregistry_v1 + + async def sample_get_project_settings(): + # Create a client + client = artifactregistry_v1.ArtifactRegistryAsyncClient() + + # Initialize request argument(s) + request = artifactregistry_v1.GetProjectSettingsRequest( + name="name_value", + ) + + # Make the request + response = await client.get_project_settings(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.artifactregistry_v1.types.GetProjectSettingsRequest, dict]]): + The request object. Gets the redirection status for a + project. + name (:class:`str`): + Required. The name of the + projectSettings resource. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.artifactregistry_v1.types.ProjectSettings: + The Artifact Registry settings that + apply to a Project. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, settings.GetProjectSettingsRequest): + request = settings.GetProjectSettingsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_project_settings + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_project_settings( + self, + request: Optional[Union[settings.UpdateProjectSettingsRequest, dict]] = None, + *, + project_settings: Optional[settings.ProjectSettings] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> settings.ProjectSettings: + r"""Updates the Settings for the Project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import artifactregistry_v1 + + async def sample_update_project_settings(): + # Create a client + client = artifactregistry_v1.ArtifactRegistryAsyncClient() + + # Initialize request argument(s) + request = artifactregistry_v1.UpdateProjectSettingsRequest( + ) + + # Make the request + response = await client.update_project_settings(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.artifactregistry_v1.types.UpdateProjectSettingsRequest, dict]]): + The request object. Sets the settings of the project. + project_settings (:class:`google.cloud.artifactregistry_v1.types.ProjectSettings`): + The project settings. + This corresponds to the ``project_settings`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Field mask to support partial + updates. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.artifactregistry_v1.types.ProjectSettings: + The Artifact Registry settings that + apply to a Project. """ # Create or coerce a protobuf request object. - # - The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. 
- if isinstance(request, dict): - request = iam_policy_pb2.SetIamPolicyRequest(**request) - elif not request: - request = iam_policy_pb2.SetIamPolicyRequest() + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_settings, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, settings.UpdateProjectSettingsRequest): + request = settings.UpdateProjectSettingsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_settings is not None: + request.project_settings = project_settings + if update_mask is not None: + request.update_mask = update_mask # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._client._transport._wrapped_methods[ - self._client._transport.set_iam_policy + self._client._transport.update_project_settings ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + gapic_v1.routing_header.to_grpc_metadata( + (("project_settings.name", request.project_settings.name),) + ), ) # Validate the universe domain. @@ -3796,15 +5166,16 @@ async def sample_set_iam_policy(): # Done; return the response. 
return response - async def get_iam_policy( + async def get_vpcsc_config( self, - request: Optional[Union[iam_policy_pb2.GetIamPolicyRequest, dict]] = None, + request: Optional[Union[vpcsc_config.GetVPCSCConfigRequest, dict]] = None, *, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> policy_pb2.Policy: - r"""Gets the IAM policy for a given resource. + ) -> vpcsc_config.VPCSCConfig: + r"""Retrieves the VPCSC Config for the Project. .. code-block:: python @@ -3816,26 +5187,32 @@ async def get_iam_policy( # client as shown in: # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import artifactregistry_v1 - from google.iam.v1 import iam_policy_pb2 # type: ignore - async def sample_get_iam_policy(): + async def sample_get_vpcsc_config(): # Create a client client = artifactregistry_v1.ArtifactRegistryAsyncClient() # Initialize request argument(s) - request = iam_policy_pb2.GetIamPolicyRequest( - resource="resource_value", + request = artifactregistry_v1.GetVPCSCConfigRequest( + name="name_value", ) # Make the request - response = await client.get_iam_policy(request=request) + response = await client.get_vpcsc_config(request=request) # Handle the response print(response) Args: - request (Optional[Union[google.iam.v1.iam_policy_pb2.GetIamPolicyRequest, dict]]): - The request object. Request message for ``GetIamPolicy`` method. + request (Optional[Union[google.cloud.artifactregistry_v1.types.GetVPCSCConfigRequest, dict]]): + The request object. Gets the VPC SC config for a project. + name (:class:`str`): + Required. The name of the VPCSCConfig + resource. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -3843,57 +5220,157 @@ async def sample_get_iam_policy(): sent along with the request as metadata. Returns: - google.iam.v1.policy_pb2.Policy: - An Identity and Access Management (IAM) policy, which specifies access - controls for Google Cloud resources. + google.cloud.artifactregistry_v1.types.VPCSCConfig: + The Artifact Registry VPC SC config + that apply to a Project. - A Policy is a collection of bindings. A binding binds - one or more members, or principals, to a single role. - Principals can be user accounts, service accounts, - Google groups, and domains (such as G Suite). A role - is a named list of permissions; each role can be an - IAM predefined role or a user-created custom role. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) - For some types of Google Cloud resources, a binding - can also specify a condition, which is a logical - expression that allows access to a resource only if - the expression evaluates to true. A condition can add - constraints based on attributes of the request, the - resource, or both. To learn which resources support - conditions in their IAM policies, see the [IAM - documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, vpcsc_config.GetVPCSCConfigRequest): + request = vpcsc_config.GetVPCSCConfigRequest(request) - **JSON example:** + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name - :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_vpcsc_config + ] - **YAML example:** + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) - :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` + # Validate the universe domain. + self._client._validate_universe_domain() - For a description of IAM and its features, see the - [IAM - documentation](\ https://cloud.google.com/iam/docs/). + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_vpcsc_config( + self, + request: Optional[ + Union[gda_vpcsc_config.UpdateVPCSCConfigRequest, dict] + ] = None, + *, + vpcsc_config: Optional[gda_vpcsc_config.VPCSCConfig] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gda_vpcsc_config.VPCSCConfig: + r"""Updates the VPCSC Config for the Project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import artifactregistry_v1 + + async def sample_update_vpcsc_config(): + # Create a client + client = artifactregistry_v1.ArtifactRegistryAsyncClient() + + # Initialize request argument(s) + request = artifactregistry_v1.UpdateVPCSCConfigRequest( + ) + + # Make the request + response = await client.update_vpcsc_config(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.artifactregistry_v1.types.UpdateVPCSCConfigRequest, dict]]): + The request object. Sets the VPCSC config of the project. + vpcsc_config (:class:`google.cloud.artifactregistry_v1.types.VPCSCConfig`): + The project config. + This corresponds to the ``vpcsc_config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Field mask to support partial + updates. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.artifactregistry_v1.types.VPCSCConfig: + The Artifact Registry VPC SC config + that apply to a Project. """ # Create or coerce a protobuf request object. - # - The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.GetIamPolicyRequest(**request) - elif not request: - request = iam_policy_pb2.GetIamPolicyRequest() + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([vpcsc_config, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, gda_vpcsc_config.UpdateVPCSCConfigRequest): + request = gda_vpcsc_config.UpdateVPCSCConfigRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if vpcsc_config is not None: + request.vpcsc_config = vpcsc_config + if update_mask is not None: + request.update_mask = update_mask # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
rpc = self._client._transport._wrapped_methods[ - self._client._transport.get_iam_policy + self._client._transport.update_vpcsc_config ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + gapic_v1.routing_header.to_grpc_metadata( + (("vpcsc_config.name", request.vpcsc_config.name),) + ), ) # Validate the universe domain. @@ -3910,16 +5387,17 @@ async def sample_get_iam_policy(): # Done; return the response. return response - async def test_iam_permissions( + async def update_package( self, - request: Optional[Union[iam_policy_pb2.TestIamPermissionsRequest, dict]] = None, + request: Optional[Union[gda_package.UpdatePackageRequest, dict]] = None, *, + package: Optional[gda_package.Package] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Tests if the caller has a list of permissions on a - resource. + ) -> gda_package.Package: + r"""Updates a package. .. 
code-block:: python @@ -3931,27 +5409,39 @@ async def test_iam_permissions( # client as shown in: # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import artifactregistry_v1 - from google.iam.v1 import iam_policy_pb2 # type: ignore - async def sample_test_iam_permissions(): + async def sample_update_package(): # Create a client client = artifactregistry_v1.ArtifactRegistryAsyncClient() # Initialize request argument(s) - request = iam_policy_pb2.TestIamPermissionsRequest( - resource="resource_value", - permissions=['permissions_value1', 'permissions_value2'], + request = artifactregistry_v1.UpdatePackageRequest( ) # Make the request - response = await client.test_iam_permissions(request=request) + response = await client.update_package(request=request) # Handle the response print(response) Args: - request (Optional[Union[google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest, dict]]): - The request object. Request message for ``TestIamPermissions`` method. + request (Optional[Union[google.cloud.artifactregistry_v1.types.UpdatePackageRequest, dict]]): + The request object. The request to update a package. + package (:class:`google.cloud.artifactregistry_v1.types.Package`): + The package that replaces the + resource on the server. + + This corresponds to the ``package`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + The update mask applies to the resource. For the + ``FieldMask`` definition, see + https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
@@ -3959,27 +5449,45 @@ async def sample_test_iam_permissions(): sent along with the request as metadata. Returns: - google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse: - Response message for TestIamPermissions method. + google.cloud.artifactregistry_v1.types.Package: + Packages are named collections of + versions. + """ # Create or coerce a protobuf request object. - # - The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.TestIamPermissionsRequest(**request) - elif not request: - request = iam_policy_pb2.TestIamPermissionsRequest() + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([package, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, gda_package.UpdatePackageRequest): + request = gda_package.UpdatePackageRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if package is not None: + request.package = package + if update_mask is not None: + request.update_mask = update_mask # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._client._transport._wrapped_methods[ - self._client._transport.test_iam_permissions + self._client._transport.update_package ] # Certain fields should be provided within the metadata header; # add these here. 
metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + gapic_v1.routing_header.to_grpc_metadata( + (("package.name", request.package.name),) + ), ) # Validate the universe domain. @@ -3996,16 +5504,16 @@ async def sample_test_iam_permissions(): # Done; return the response. return response - async def get_project_settings( + async def list_attachments( self, - request: Optional[Union[settings.GetProjectSettingsRequest, dict]] = None, + request: Optional[Union[attachment.ListAttachmentsRequest, dict]] = None, *, - name: Optional[str] = None, + parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> settings.ProjectSettings: - r"""Retrieves the Settings for the Project. + ) -> pagers.ListAttachmentsAsyncPager: + r"""Lists attachments. .. code-block:: python @@ -4018,30 +5526,31 @@ async def get_project_settings( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import artifactregistry_v1 - async def sample_get_project_settings(): + async def sample_list_attachments(): # Create a client client = artifactregistry_v1.ArtifactRegistryAsyncClient() # Initialize request argument(s) - request = artifactregistry_v1.GetProjectSettingsRequest( - name="name_value", + request = artifactregistry_v1.ListAttachmentsRequest( + parent="parent_value", ) # Make the request - response = await client.get_project_settings(request=request) + page_result = client.list_attachments(request=request) # Handle the response - print(response) + async for response in page_result: + print(response) Args: - request (Optional[Union[google.cloud.artifactregistry_v1.types.GetProjectSettingsRequest, dict]]): - The request object. Gets the redirection status for a - project. - name (:class:`str`): - Required. The name of the - projectSettings resource. 
+ request (Optional[Union[google.cloud.artifactregistry_v1.types.ListAttachmentsRequest, dict]]): + The request object. The request to list attachments. + parent (:class:`str`): + Required. The name of the parent + resource whose attachments will be + listed. - This corresponds to the ``name`` field + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, @@ -4051,15 +5560,18 @@ async def sample_get_project_settings(): sent along with the request as metadata. Returns: - google.cloud.artifactregistry_v1.types.ProjectSettings: - The Artifact Registry settings that - apply to a Project. + google.cloud.artifactregistry_v1.services.artifact_registry.pagers.ListAttachmentsAsyncPager: + The response from listing + attachments. + Iterating over this object will yield + results and resolve additional pages + automatically. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -4068,24 +5580,24 @@ async def sample_get_project_settings(): # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, settings.GetProjectSettingsRequest): - request = settings.GetProjectSettingsRequest(request) + if not isinstance(request, attachment.ListAttachmentsRequest): + request = attachment.ListAttachmentsRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. 
- if name is not None: - request.name = name + if parent is not None: + request.parent = parent # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._client._transport._wrapped_methods[ - self._client._transport.get_project_settings + self._client._transport.list_attachments ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -4099,20 +5611,30 @@ async def sample_get_project_settings(): metadata=metadata, ) + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListAttachmentsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + # Done; return the response. return response - async def update_project_settings( + async def get_attachment( self, - request: Optional[Union[settings.UpdateProjectSettingsRequest, dict]] = None, + request: Optional[Union[attachment.GetAttachmentRequest, dict]] = None, *, - project_settings: Optional[settings.ProjectSettings] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> settings.ProjectSettings: - r"""Updates the Settings for the Project. + ) -> attachment.Attachment: + r"""Gets an attachment. .. 
code-block:: python @@ -4125,33 +5647,30 @@ async def update_project_settings( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import artifactregistry_v1 - async def sample_update_project_settings(): + async def sample_get_attachment(): # Create a client client = artifactregistry_v1.ArtifactRegistryAsyncClient() # Initialize request argument(s) - request = artifactregistry_v1.UpdateProjectSettingsRequest( + request = artifactregistry_v1.GetAttachmentRequest( + name="name_value", ) # Make the request - response = await client.update_project_settings(request=request) + response = await client.get_attachment(request=request) # Handle the response print(response) Args: - request (Optional[Union[google.cloud.artifactregistry_v1.types.UpdateProjectSettingsRequest, dict]]): - The request object. Sets the settings of the project. - project_settings (:class:`google.cloud.artifactregistry_v1.types.ProjectSettings`): - The project settings. - This corresponds to the ``project_settings`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Field mask to support partial - updates. + request (Optional[Union[google.cloud.artifactregistry_v1.types.GetAttachmentRequest, dict]]): + The request object. The request to retrieve an + attachment. + name (:class:`str`): + Required. The name of the attachment + to retrieve. - This corresponds to the ``update_mask`` field + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, @@ -4161,15 +5680,18 @@ async def sample_update_project_settings(): sent along with the request as metadata. Returns: - google.cloud.artifactregistry_v1.types.ProjectSettings: - The Artifact Registry settings that - apply to a Project. 
+ google.cloud.artifactregistry_v1.types.Attachment: + An Attachment refers to additional + metadata that can be attached to + artifacts in Artifact Registry. An + attachment consists of one or more + files. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([project_settings, update_mask]) + has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -4178,28 +5700,24 @@ async def sample_update_project_settings(): # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, settings.UpdateProjectSettingsRequest): - request = settings.UpdateProjectSettingsRequest(request) + if not isinstance(request, attachment.GetAttachmentRequest): + request = attachment.GetAttachmentRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if project_settings is not None: - request.project_settings = project_settings - if update_mask is not None: - request.update_mask = update_mask + if name is not None: + request.name = name # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._client._transport._wrapped_methods[ - self._client._transport.update_project_settings + self._client._transport.get_attachment ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("project_settings.name", request.project_settings.name),) - ), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -4216,16 +5734,20 @@ async def sample_update_project_settings(): # Done; return the response. 
return response - async def get_vpcsc_config( + async def create_attachment( self, - request: Optional[Union[vpcsc_config.GetVPCSCConfigRequest, dict]] = None, + request: Optional[Union[gda_attachment.CreateAttachmentRequest, dict]] = None, *, - name: Optional[str] = None, + parent: Optional[str] = None, + attachment: Optional[gda_attachment.Attachment] = None, + attachment_id: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> vpcsc_config.VPCSCConfig: - r"""Retrieves the VPCSC Config for the Project. + ) -> operation_async.AsyncOperation: + r"""Creates an attachment. The returned Operation will + finish once the attachment has been created. Its + response will be the created attachment. .. code-block:: python @@ -4238,29 +5760,55 @@ async def get_vpcsc_config( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import artifactregistry_v1 - async def sample_get_vpcsc_config(): + async def sample_create_attachment(): # Create a client client = artifactregistry_v1.ArtifactRegistryAsyncClient() # Initialize request argument(s) - request = artifactregistry_v1.GetVPCSCConfigRequest( - name="name_value", + attachment = artifactregistry_v1.Attachment() + attachment.target = "target_value" + attachment.files = ['files_value1', 'files_value2'] + + request = artifactregistry_v1.CreateAttachmentRequest( + parent="parent_value", + attachment_id="attachment_id_value", + attachment=attachment, ) # Make the request - response = await client.get_vpcsc_config(request=request) + operation = client.create_attachment(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() # Handle the response print(response) Args: - request (Optional[Union[google.cloud.artifactregistry_v1.types.GetVPCSCConfigRequest, dict]]): - The request object. 
Gets the VPC SC config for a project. - name (:class:`str`): - Required. The name of the VPCSCConfig - resource. + request (Optional[Union[google.cloud.artifactregistry_v1.types.CreateAttachmentRequest, dict]]): + The request object. The request to create a new + attachment. + parent (:class:`str`): + Required. The name of the parent + resource where the attachment will be + created. - This corresponds to the ``name`` field + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + attachment (:class:`google.cloud.artifactregistry_v1.types.Attachment`): + Required. The attachment to be + created. + + This corresponds to the ``attachment`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + attachment_id (:class:`str`): + Required. The attachment id to use + for this attachment. + + This corresponds to the ``attachment_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, @@ -4270,15 +5818,18 @@ async def sample_get_vpcsc_config(): sent along with the request as metadata. Returns: - google.cloud.artifactregistry_v1.types.VPCSCConfig: - The Artifact Registry VPC SC config - that apply to a Project. + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.artifactregistry_v1.types.Attachment` An Attachment refers to additional metadata that can be attached to + artifacts in Artifact Registry. An attachment + consists of one or more files. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) + has_flattened_params = any([parent, attachment, attachment_id]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -4287,24 +5838,28 @@ async def sample_get_vpcsc_config(): # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, vpcsc_config.GetVPCSCConfigRequest): - request = vpcsc_config.GetVPCSCConfigRequest(request) + if not isinstance(request, gda_attachment.CreateAttachmentRequest): + request = gda_attachment.CreateAttachmentRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: - request.name = name + if parent is not None: + request.parent = parent + if attachment is not None: + request.attachment = attachment + if attachment_id is not None: + request.attachment_id = attachment_id # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._client._transport._wrapped_methods[ - self._client._transport.get_vpcsc_config + self._client._transport.create_attachment ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -4318,22 +5873,29 @@ async def sample_get_vpcsc_config(): metadata=metadata, ) + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + gda_attachment.Attachment, + metadata_type=service.OperationMetadata, + ) + # Done; return the response. 
return response - async def update_vpcsc_config( + async def delete_attachment( self, - request: Optional[ - Union[gda_vpcsc_config.UpdateVPCSCConfigRequest, dict] - ] = None, + request: Optional[Union[attachment.DeleteAttachmentRequest, dict]] = None, *, - vpcsc_config: Optional[gda_vpcsc_config.VPCSCConfig] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> gda_vpcsc_config.VPCSCConfig: - r"""Updates the VPCSC Config for the Project. + ) -> operation_async.AsyncOperation: + r"""Deletes an attachment. The returned Operation will finish once + the attachments has been deleted. It will not have any Operation + metadata and will return a ``google.protobuf.Empty`` response. .. code-block:: python @@ -4346,33 +5908,33 @@ async def update_vpcsc_config( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import artifactregistry_v1 - async def sample_update_vpcsc_config(): + async def sample_delete_attachment(): # Create a client client = artifactregistry_v1.ArtifactRegistryAsyncClient() # Initialize request argument(s) - request = artifactregistry_v1.UpdateVPCSCConfigRequest( + request = artifactregistry_v1.DeleteAttachmentRequest( + name="name_value", ) # Make the request - response = await client.update_vpcsc_config(request=request) + operation = client.delete_attachment(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() # Handle the response print(response) Args: - request (Optional[Union[google.cloud.artifactregistry_v1.types.UpdateVPCSCConfigRequest, dict]]): - The request object. Sets the VPCSC config of the project. - vpcsc_config (:class:`google.cloud.artifactregistry_v1.types.VPCSCConfig`): - The project config. 
- This corresponds to the ``vpcsc_config`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Field mask to support partial - updates. + request (Optional[Union[google.cloud.artifactregistry_v1.types.DeleteAttachmentRequest, dict]]): + The request object. The request to delete an attachment. + name (:class:`str`): + Required. The name of the attachment + to delete. - This corresponds to the ``update_mask`` field + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, @@ -4382,15 +5944,25 @@ async def sample_update_vpcsc_config(): sent along with the request as metadata. Returns: - google.cloud.artifactregistry_v1.types.VPCSCConfig: - The Artifact Registry VPC SC config - that apply to a Project. + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([vpcsc_config, update_mask]) + has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -4399,28 +5971,24 @@ async def sample_update_vpcsc_config(): # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, gda_vpcsc_config.UpdateVPCSCConfigRequest): - request = gda_vpcsc_config.UpdateVPCSCConfigRequest(request) + if not isinstance(request, attachment.DeleteAttachmentRequest): + request = attachment.DeleteAttachmentRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if vpcsc_config is not None: - request.vpcsc_config = vpcsc_config - if update_mask is not None: - request.update_mask = update_mask + if name is not None: + request.name = name # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._client._transport._wrapped_methods[ - self._client._transport.update_vpcsc_config + self._client._transport.delete_attachment ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("vpcsc_config.name", request.vpcsc_config.name),) - ), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -4434,6 +6002,14 @@ async def sample_update_vpcsc_config(): metadata=metadata, ) + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=service.OperationMetadata, + ) + # Done; return the response. 
return response diff --git a/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/services/artifact_registry/client.py b/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/services/artifact_registry/client.py index adde437957cc..fd86532583e5 100644 --- a/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/services/artifact_registry/client.py +++ b/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/services/artifact_registry/client.py @@ -58,16 +58,26 @@ from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import struct_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore +from google.type import expr_pb2 # type: ignore from google.cloud.artifactregistry_v1.services.artifact_registry import pagers -from google.cloud.artifactregistry_v1.types import apt_artifact, artifact, file, package from google.cloud.artifactregistry_v1.types import vpcsc_config as gda_vpcsc_config +from google.cloud.artifactregistry_v1.types import apt_artifact, artifact +from google.cloud.artifactregistry_v1.types import attachment +from google.cloud.artifactregistry_v1.types import attachment as gda_attachment +from google.cloud.artifactregistry_v1.types import file +from google.cloud.artifactregistry_v1.types import file as gda_file +from google.cloud.artifactregistry_v1.types import package +from google.cloud.artifactregistry_v1.types import package as gda_package from google.cloud.artifactregistry_v1.types import repository from google.cloud.artifactregistry_v1.types import repository as gda_repository +from google.cloud.artifactregistry_v1.types import rule +from google.cloud.artifactregistry_v1.types import rule as gda_rule from google.cloud.artifactregistry_v1.types import service, settings from google.cloud.artifactregistry_v1.types import tag from google.cloud.artifactregistry_v1.types import tag as gda_tag from google.cloud.artifactregistry_v1.types 
import version +from google.cloud.artifactregistry_v1.types import version as gda_version from google.cloud.artifactregistry_v1.types import vpcsc_config from google.cloud.artifactregistry_v1.types import yum_artifact @@ -240,6 +250,30 @@ def parse_apt_artifact_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def attachment_path( + project: str, + location: str, + repository: str, + attachment: str, + ) -> str: + """Returns a fully-qualified attachment string.""" + return "projects/{project}/locations/{location}/repositories/{repository}/attachments/{attachment}".format( + project=project, + location=location, + repository=repository, + attachment=attachment, + ) + + @staticmethod + def parse_attachment_path(path: str) -> Dict[str, str]: + """Parses a attachment path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/repositories/(?P.+?)/attachments/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def docker_image_path( project: str, @@ -423,6 +457,30 @@ def parse_repository_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def rule_path( + project: str, + location: str, + repository: str, + rule: str, + ) -> str: + """Returns a fully-qualified rule string.""" + return "projects/{project}/locations/{location}/repositories/{repository}/rules/{rule}".format( + project=project, + location=location, + repository=repository, + rule=rule, + ) + + @staticmethod + def parse_rule_path(path: str) -> Dict[str, str]: + """Parses a rule path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/repositories/(?P.+?)/rules/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def secret_version_path( project: str, @@ -796,36 +854,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( 
- client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = ArtifactRegistryClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -835,13 +863,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or ArtifactRegistryClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): @@ -3526,6 +3550,123 @@ def sample_batch_delete_versions(): # Done; return the response. 
return response + def update_version( + self, + request: Optional[Union[gda_version.UpdateVersionRequest, dict]] = None, + *, + version: Optional[gda_version.Version] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gda_version.Version: + r"""Updates a version. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import artifactregistry_v1 + + def sample_update_version(): + # Create a client + client = artifactregistry_v1.ArtifactRegistryClient() + + # Initialize request argument(s) + request = artifactregistry_v1.UpdateVersionRequest( + ) + + # Make the request + response = client.update_version(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.artifactregistry_v1.types.UpdateVersionRequest, dict]): + The request object. The request to update a version. + version (google.cloud.artifactregistry_v1.types.Version): + Required. The Version that replaces + the resource on the server. + + This corresponds to the ``version`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + The update mask applies to the resource. 
For the + ``FieldMask`` definition, see + https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.artifactregistry_v1.types.Version: + The body of a version resource. A + version resource represents a collection + of components, such as files and other + data. This may correspond to a version + in many package management schemes. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([version, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, gda_version.UpdateVersionRequest): + request = gda_version.UpdateVersionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if version is not None: + request.version = version + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_version] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("version.name", request.version.name),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + def list_files( self, request: Optional[Union[file.ListFilesRequest, dict]] = None, @@ -3747,16 +3888,18 @@ def sample_get_file(): # Done; return the response. return response - def list_tags( + def delete_file( self, - request: Optional[Union[tag.ListTagsRequest, dict]] = None, + request: Optional[Union[file.DeleteFileRequest, dict]] = None, *, - parent: Optional[str] = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListTagsPager: - r"""Lists tags. + ) -> operation.Operation: + r"""Deletes a file and all of its content. It is only + allowed on generic repositories. The returned operation + will complete once the file has been deleted. .. 
code-block:: python @@ -3769,30 +3912,33 @@ def list_tags( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import artifactregistry_v1 - def sample_list_tags(): + def sample_delete_file(): # Create a client client = artifactregistry_v1.ArtifactRegistryClient() # Initialize request argument(s) - request = artifactregistry_v1.ListTagsRequest( + request = artifactregistry_v1.DeleteFileRequest( + name="name_value", ) # Make the request - page_result = client.list_tags(request=request) + operation = client.delete_file(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() # Handle the response - for response in page_result: - print(response) + print(response) Args: - request (Union[google.cloud.artifactregistry_v1.types.ListTagsRequest, dict]): - The request object. The request to list tags. - parent (str): - The name of the parent package whose tags will be - listed. For example: - ``projects/p1/locations/us-central1/repositories/repo1/packages/pkg1``. + request (Union[google.cloud.artifactregistry_v1.types.DeleteFileRequest, dict]): + The request object. The request to delete a file. + name (str): + Required. The name of the file to + delete. - This corresponds to the ``parent`` field + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -3802,18 +3948,25 @@ def sample_list_tags(): sent along with the request as metadata. Returns: - google.cloud.artifactregistry_v1.services.artifact_registry.pagers.ListTagsPager: - The response from listing tags. + google.api_core.operation.Operation: + An object representing a long-running operation. - Iterating over this object will yield - results and resolve additional pages - automatically. 
+ The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -3822,21 +3975,21 @@ def sample_list_tags(): # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, tag.ListTagsRequest): - request = tag.ListTagsRequest(request) + if not isinstance(request, file.DeleteFileRequest): + request = file.DeleteFileRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: - request.parent = parent + if name is not None: + request.name = name # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_tags] + rpc = self._transport._wrapped_methods[self._transport.delete_file] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. 
@@ -3850,30 +4003,28 @@ def sample_list_tags(): metadata=metadata, ) - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListTagsPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=service.OperationMetadata, ) # Done; return the response. return response - def get_tag( + def update_file( self, - request: Optional[Union[tag.GetTagRequest, dict]] = None, + request: Optional[Union[gda_file.UpdateFileRequest, dict]] = None, *, - name: Optional[str] = None, + file: Optional[gda_file.File] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> tag.Tag: - r"""Gets a tag. + ) -> gda_file.File: + r"""Updates a file. .. code-block:: python @@ -3886,26 +4037,36 @@ def get_tag( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import artifactregistry_v1 - def sample_get_tag(): + def sample_update_file(): # Create a client client = artifactregistry_v1.ArtifactRegistryClient() # Initialize request argument(s) - request = artifactregistry_v1.GetTagRequest( + request = artifactregistry_v1.UpdateFileRequest( ) # Make the request - response = client.get_tag(request=request) + response = client.update_file(request=request) # Handle the response print(response) Args: - request (Union[google.cloud.artifactregistry_v1.types.GetTagRequest, dict]): - The request object. The request to retrieve a tag. - name (str): - The name of the tag to retrieve. 
- This corresponds to the ``name`` field + request (Union[google.cloud.artifactregistry_v1.types.UpdateFileRequest, dict]): + The request object. The request to update a file. + file (google.cloud.artifactregistry_v1.types.File): + Required. The File that replaces the + resource on the server. + + This corresponds to the ``file`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. The update mask applies to the resource. For + the ``FieldMask`` definition, see + https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask + + This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -3915,16 +4076,16 @@ def sample_get_tag(): sent along with the request as metadata. Returns: - google.cloud.artifactregistry_v1.types.Tag: - Tags point to a version and represent - an alternative name that can be used to - access the version. + google.cloud.artifactregistry_v1.types.File: + Files store content that is + potentially associated with Packages or + Versions. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + has_flattened_params = any([file, update_mask]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -3933,16 +4094,237 @@ def sample_get_tag(): # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. 
- if not isinstance(request, tag.GetTagRequest): - request = tag.GetTagRequest(request) + if not isinstance(request, gda_file.UpdateFileRequest): + request = gda_file.UpdateFileRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: - request.name = name + if file is not None: + request.file = file + if update_mask is not None: + request.update_mask = update_mask # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_tag] + rpc = self._transport._wrapped_methods[self._transport.update_file] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("file.name", request.file.name),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_tags( + self, + request: Optional[Union[tag.ListTagsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListTagsPager: + r"""Lists tags. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import artifactregistry_v1 + + def sample_list_tags(): + # Create a client + client = artifactregistry_v1.ArtifactRegistryClient() + + # Initialize request argument(s) + request = artifactregistry_v1.ListTagsRequest( + ) + + # Make the request + page_result = client.list_tags(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.artifactregistry_v1.types.ListTagsRequest, dict]): + The request object. The request to list tags. + parent (str): + The name of the parent package whose tags will be + listed. For example: + ``projects/p1/locations/us-central1/repositories/repo1/packages/pkg1``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.artifactregistry_v1.services.artifact_registry.pagers.ListTagsPager: + The response from listing tags. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, tag.ListTagsRequest): + request = tag.ListTagsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_tags] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListTagsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_tag( + self, + request: Optional[Union[tag.GetTagRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> tag.Tag: + r"""Gets a tag. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import artifactregistry_v1 + + def sample_get_tag(): + # Create a client + client = artifactregistry_v1.ArtifactRegistryClient() + + # Initialize request argument(s) + request = artifactregistry_v1.GetTagRequest( + ) + + # Make the request + response = client.get_tag(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.artifactregistry_v1.types.GetTagRequest, dict]): + The request object. The request to retrieve a tag. + name (str): + The name of the tag to retrieve. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.artifactregistry_v1.types.Tag: + Tags point to a version and represent + an alternative name that can be used to + access the version. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, tag.GetTagRequest): + request = tag.GetTagRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_tag] # Certain fields should be provided within the metadata header; # add these here. @@ -4285,15 +4667,18 @@ def sample_delete_tag(): metadata=metadata, ) - def set_iam_policy( + def create_rule( self, - request: Optional[Union[iam_policy_pb2.SetIamPolicyRequest, dict]] = None, + request: Optional[Union[gda_rule.CreateRuleRequest, dict]] = None, *, + parent: Optional[str] = None, + rule: Optional[gda_rule.Rule] = None, + rule_id: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> policy_pb2.Policy: - r"""Updates the IAM policy for a given resource. + ) -> gda_rule.Rule: + r"""Creates a rule. .. code-block:: python @@ -4305,26 +4690,44 @@ def set_iam_policy( # client as shown in: # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import artifactregistry_v1 - from google.iam.v1 import iam_policy_pb2 # type: ignore - def sample_set_iam_policy(): + def sample_create_rule(): # Create a client client = artifactregistry_v1.ArtifactRegistryClient() # Initialize request argument(s) - request = iam_policy_pb2.SetIamPolicyRequest( - resource="resource_value", + request = artifactregistry_v1.CreateRuleRequest( + parent="parent_value", ) # Make the request - response = client.set_iam_policy(request=request) + response = client.create_rule(request=request) # Handle the response print(response) Args: - request (Union[google.iam.v1.iam_policy_pb2.SetIamPolicyRequest, dict]): - The request object. Request message for ``SetIamPolicy`` method. + request (Union[google.cloud.artifactregistry_v1.types.CreateRuleRequest, dict]): + The request object. The request to create a new rule. 
+ parent (str): + Required. The name of the parent + resource where the rule will be created. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + rule (google.cloud.artifactregistry_v1.types.Rule): + The rule to be created. + This corresponds to the ``rule`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + rule_id (str): + The rule id to use for this + repository. + + This corresponds to the ``rule_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -4332,56 +4735,1003 @@ def sample_set_iam_policy(): sent along with the request as metadata. Returns: - google.iam.v1.policy_pb2.Policy: - An Identity and Access Management (IAM) policy, which specifies access - controls for Google Cloud resources. - - A Policy is a collection of bindings. A binding binds - one or more members, or principals, to a single role. - Principals can be user accounts, service accounts, - Google groups, and domains (such as G Suite). A role - is a named list of permissions; each role can be an - IAM predefined role or a user-created custom role. + google.cloud.artifactregistry_v1.types.Rule: + A rule defines the deny or allow + action of the operation it applies to + and the conditions required for the rule + to apply. You can set one rule for an + entire repository and one rule for each + package within. - For some types of Google Cloud resources, a binding - can also specify a condition, which is a logical - expression that allows access to a resource only if + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent, rule, rule_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, gda_rule.CreateRuleRequest): + request = gda_rule.CreateRuleRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if rule is not None: + request.rule = rule + if rule_id is not None: + request.rule_id = rule_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_rule] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_rules( + self, + request: Optional[Union[rule.ListRulesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListRulesPager: + r"""Lists rules. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import artifactregistry_v1 + + def sample_list_rules(): + # Create a client + client = artifactregistry_v1.ArtifactRegistryClient() + + # Initialize request argument(s) + request = artifactregistry_v1.ListRulesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_rules(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.artifactregistry_v1.types.ListRulesRequest, dict]): + The request object. The request to list rules. + parent (str): + Required. The name of the parent repository whose rules + will be listed. For example: + ``projects/p1/locations/us-central1/repositories/repo1``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.artifactregistry_v1.services.artifact_registry.pagers.ListRulesPager: + The response from listing rules. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, rule.ListRulesRequest): + request = rule.ListRulesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_rules] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListRulesPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_rule( + self, + request: Optional[Union[rule.GetRuleRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> rule.Rule: + r"""Gets a rule. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import artifactregistry_v1 + + def sample_get_rule(): + # Create a client + client = artifactregistry_v1.ArtifactRegistryClient() + + # Initialize request argument(s) + request = artifactregistry_v1.GetRuleRequest( + name="name_value", + ) + + # Make the request + response = client.get_rule(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.artifactregistry_v1.types.GetRuleRequest, dict]): + The request object. The request to retrieve a rule. + name (str): + Required. The name of the rule to + retrieve. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.artifactregistry_v1.types.Rule: + A rule defines the deny or allow + action of the operation it applies to + and the conditions required for the rule + to apply. You can set one rule for an + entire repository and one rule for each + package within. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, rule.GetRuleRequest): + request = rule.GetRuleRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_rule] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_rule( + self, + request: Optional[Union[gda_rule.UpdateRuleRequest, dict]] = None, + *, + rule: Optional[gda_rule.Rule] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gda_rule.Rule: + r"""Updates a rule. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import artifactregistry_v1 + + def sample_update_rule(): + # Create a client + client = artifactregistry_v1.ArtifactRegistryClient() + + # Initialize request argument(s) + request = artifactregistry_v1.UpdateRuleRequest( + ) + + # Make the request + response = client.update_rule(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.artifactregistry_v1.types.UpdateRuleRequest, dict]): + The request object. The request to update a rule. + rule (google.cloud.artifactregistry_v1.types.Rule): + The rule that replaces the resource + on the server. + + This corresponds to the ``rule`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + The update mask applies to the resource. For the + ``FieldMask`` definition, see + https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.artifactregistry_v1.types.Rule: + A rule defines the deny or allow + action of the operation it applies to + and the conditions required for the rule + to apply. You can set one rule for an + entire repository and one rule for each + package within. + + """ + # Create or coerce a protobuf request object. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([rule, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, gda_rule.UpdateRuleRequest): + request = gda_rule.UpdateRuleRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if rule is not None: + request.rule = rule + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_rule] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("rule.name", request.rule.name),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_rule( + self, + request: Optional[Union[rule.DeleteRuleRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a rule. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import artifactregistry_v1 + + def sample_delete_rule(): + # Create a client + client = artifactregistry_v1.ArtifactRegistryClient() + + # Initialize request argument(s) + request = artifactregistry_v1.DeleteRuleRequest( + name="name_value", + ) + + # Make the request + client.delete_rule(request=request) + + Args: + request (Union[google.cloud.artifactregistry_v1.types.DeleteRuleRequest, dict]): + The request object. The request to delete a rule. + name (str): + Required. The name of the rule to + delete. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, rule.DeleteRuleRequest): + request = rule.DeleteRuleRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.delete_rule] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def set_iam_policy( + self, + request: Optional[Union[iam_policy_pb2.SetIamPolicyRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Updates the IAM policy for a given resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import artifactregistry_v1 + from google.iam.v1 import iam_policy_pb2 # type: ignore + + def sample_set_iam_policy(): + # Create a client + client = artifactregistry_v1.ArtifactRegistryClient() + + # Initialize request argument(s) + request = iam_policy_pb2.SetIamPolicyRequest( + resource="resource_value", + ) + + # Make the request + response = client.set_iam_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.iam.v1.iam_policy_pb2.SetIamPolicyRequest, dict]): + The request object. Request message for ``SetIamPolicy`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.iam.v1.policy_pb2.Policy: + An Identity and Access Management (IAM) policy, which specifies access + controls for Google Cloud resources. + + A Policy is a collection of bindings. A binding binds + one or more members, or principals, to a single role. + Principals can be user accounts, service accounts, + Google groups, and domains (such as G Suite). A role + is a named list of permissions; each role can be an + IAM predefined role or a user-created custom role. + + For some types of Google Cloud resources, a binding + can also specify a condition, which is a logical + expression that allows access to a resource only if + the expression evaluates to true. A condition can add + constraints based on attributes of the request, the + resource, or both. To learn which resources support + conditions in their IAM policies, see the [IAM + documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). 
+ + **JSON example:** + + :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` + + **YAML example:** + + :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` + + For a description of IAM and its features, see the + [IAM + documentation](\ https://cloud.google.com/iam/docs/). + + """ + # Create or coerce a protobuf request object. + if isinstance(request, dict): + # - The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + request = iam_policy_pb2.SetIamPolicyRequest(**request) + elif not request: + # Null request, just make one. + request = iam_policy_pb2.SetIamPolicyRequest() + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Validate the universe domain. 
+ self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_iam_policy( + self, + request: Optional[Union[iam_policy_pb2.GetIamPolicyRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Gets the IAM policy for a given resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import artifactregistry_v1 + from google.iam.v1 import iam_policy_pb2 # type: ignore + + def sample_get_iam_policy(): + # Create a client + client = artifactregistry_v1.ArtifactRegistryClient() + + # Initialize request argument(s) + request = iam_policy_pb2.GetIamPolicyRequest( + resource="resource_value", + ) + + # Make the request + response = client.get_iam_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.iam.v1.iam_policy_pb2.GetIamPolicyRequest, dict]): + The request object. Request message for ``GetIamPolicy`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.iam.v1.policy_pb2.Policy: + An Identity and Access Management (IAM) policy, which specifies access + controls for Google Cloud resources. + + A Policy is a collection of bindings. A binding binds + one or more members, or principals, to a single role. + Principals can be user accounts, service accounts, + Google groups, and domains (such as G Suite). A role + is a named list of permissions; each role can be an + IAM predefined role or a user-created custom role. + + For some types of Google Cloud resources, a binding + can also specify a condition, which is a logical + expression that allows access to a resource only if the expression evaluates to true. A condition can add constraints based on attributes of the request, the resource, or both. To learn which resources support conditions in their IAM policies, see the [IAM documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). - **JSON example:** + **JSON example:** + + :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` + + **YAML example:** + + :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < 
timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` + + For a description of IAM and its features, see the + [IAM + documentation](\ https://cloud.google.com/iam/docs/). + + """ + # Create or coerce a protobuf request object. + if isinstance(request, dict): + # - The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + request = iam_policy_pb2.GetIamPolicyRequest(**request) + elif not request: + # Null request, just make one. + request = iam_policy_pb2.GetIamPolicyRequest() + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` + # Done; return the response. 
+ return response - **YAML example:** + def test_iam_permissions( + self, + request: Optional[Union[iam_policy_pb2.TestIamPermissionsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Tests if the caller has a list of permissions on a + resource. - :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import artifactregistry_v1 + from google.iam.v1 import iam_policy_pb2 # type: ignore + + def sample_test_iam_permissions(): + # Create a client + client = artifactregistry_v1.ArtifactRegistryClient() + + # Initialize request argument(s) + request = iam_policy_pb2.TestIamPermissionsRequest( + resource="resource_value", + permissions=['permissions_value1', 'permissions_value2'], + ) + + # Make the request + response = client.test_iam_permissions(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest, dict]): + The request object. 
Request message for ``TestIamPermissions`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse: + Response message for TestIamPermissions method. + """ + # Create or coerce a protobuf request object. + if isinstance(request, dict): + # - The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + request = iam_policy_pb2.TestIamPermissionsRequest(**request) + elif not request: + # Null request, just make one. + request = iam_policy_pb2.TestIamPermissionsRequest() + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.test_iam_permissions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_project_settings( + self, + request: Optional[Union[settings.GetProjectSettingsRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> settings.ProjectSettings: + r"""Retrieves the Settings for the Project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import artifactregistry_v1 + + def sample_get_project_settings(): + # Create a client + client = artifactregistry_v1.ArtifactRegistryClient() + + # Initialize request argument(s) + request = artifactregistry_v1.GetProjectSettingsRequest( + name="name_value", + ) + + # Make the request + response = client.get_project_settings(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.artifactregistry_v1.types.GetProjectSettingsRequest, dict]): + The request object. Gets the redirection status for a + project. + name (str): + Required. The name of the + projectSettings resource. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.artifactregistry_v1.types.ProjectSettings: + The Artifact Registry settings that + apply to a Project. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, settings.GetProjectSettingsRequest): + request = settings.GetProjectSettingsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_project_settings] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_project_settings( + self, + request: Optional[Union[settings.UpdateProjectSettingsRequest, dict]] = None, + *, + project_settings: Optional[settings.ProjectSettings] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> settings.ProjectSettings: + r"""Updates the Settings for the Project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import artifactregistry_v1 + + def sample_update_project_settings(): + # Create a client + client = artifactregistry_v1.ArtifactRegistryClient() + + # Initialize request argument(s) + request = artifactregistry_v1.UpdateProjectSettingsRequest( + ) + + # Make the request + response = client.update_project_settings(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.artifactregistry_v1.types.UpdateProjectSettingsRequest, dict]): + The request object. Sets the settings of the project. + project_settings (google.cloud.artifactregistry_v1.types.ProjectSettings): + The project settings. + This corresponds to the ``project_settings`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Field mask to support partial + updates. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. - For a description of IAM and its features, see the - [IAM - documentation](\ https://cloud.google.com/iam/docs/). + Returns: + google.cloud.artifactregistry_v1.types.ProjectSettings: + The Artifact Registry settings that + apply to a Project. """ # Create or coerce a protobuf request object. - if isinstance(request, dict): - # - The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. 
- request = iam_policy_pb2.SetIamPolicyRequest(**request) - elif not request: - # Null request, just make one. - request = iam_policy_pb2.SetIamPolicyRequest() + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_settings, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, settings.UpdateProjectSettingsRequest): + request = settings.UpdateProjectSettingsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_settings is not None: + request.project_settings = project_settings + if update_mask is not None: + request.update_mask = update_mask # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.set_iam_policy] + rpc = self._transport._wrapped_methods[self._transport.update_project_settings] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + gapic_v1.routing_header.to_grpc_metadata( + (("project_settings.name", request.project_settings.name),) + ), ) # Validate the universe domain. @@ -4398,15 +5748,16 @@ def sample_set_iam_policy(): # Done; return the response. 
return response - def get_iam_policy( + def get_vpcsc_config( self, - request: Optional[Union[iam_policy_pb2.GetIamPolicyRequest, dict]] = None, + request: Optional[Union[vpcsc_config.GetVPCSCConfigRequest, dict]] = None, *, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> policy_pb2.Policy: - r"""Gets the IAM policy for a given resource. + ) -> vpcsc_config.VPCSCConfig: + r"""Retrieves the VPCSC Config for the Project. .. code-block:: python @@ -4418,26 +5769,32 @@ def get_iam_policy( # client as shown in: # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import artifactregistry_v1 - from google.iam.v1 import iam_policy_pb2 # type: ignore - def sample_get_iam_policy(): + def sample_get_vpcsc_config(): # Create a client client = artifactregistry_v1.ArtifactRegistryClient() # Initialize request argument(s) - request = iam_policy_pb2.GetIamPolicyRequest( - resource="resource_value", + request = artifactregistry_v1.GetVPCSCConfigRequest( + name="name_value", ) # Make the request - response = client.get_iam_policy(request=request) + response = client.get_vpcsc_config(request=request) # Handle the response print(response) Args: - request (Union[google.iam.v1.iam_policy_pb2.GetIamPolicyRequest, dict]): - The request object. Request message for ``GetIamPolicy`` method. + request (Union[google.cloud.artifactregistry_v1.types.GetVPCSCConfigRequest, dict]): + The request object. Gets the VPC SC config for a project. + name (str): + Required. The name of the VPCSCConfig + resource. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
@@ -4445,56 +5802,151 @@ def sample_get_iam_policy(): sent along with the request as metadata. Returns: - google.iam.v1.policy_pb2.Policy: - An Identity and Access Management (IAM) policy, which specifies access - controls for Google Cloud resources. + google.cloud.artifactregistry_v1.types.VPCSCConfig: + The Artifact Registry VPC SC config + that apply to a Project. - A Policy is a collection of bindings. A binding binds - one or more members, or principals, to a single role. - Principals can be user accounts, service accounts, - Google groups, and domains (such as G Suite). A role - is a named list of permissions; each role can be an - IAM predefined role or a user-created custom role. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) - For some types of Google Cloud resources, a binding - can also specify a condition, which is a logical - expression that allows access to a resource only if - the expression evaluates to true. A condition can add - constraints based on attributes of the request, the - resource, or both. To learn which resources support - conditions in their IAM policies, see the [IAM - documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, vpcsc_config.GetVPCSCConfigRequest): + request = vpcsc_config.GetVPCSCConfigRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if name is not None: + request.name = name - **JSON example:** + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_vpcsc_config] - :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) - **YAML example:** + # Validate the universe domain. + self._validate_universe_domain() - :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - For a description of IAM and its features, see the - [IAM - documentation](\ https://cloud.google.com/iam/docs/). + # Done; return the response. 
+ return response + + def update_vpcsc_config( + self, + request: Optional[ + Union[gda_vpcsc_config.UpdateVPCSCConfigRequest, dict] + ] = None, + *, + vpcsc_config: Optional[gda_vpcsc_config.VPCSCConfig] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gda_vpcsc_config.VPCSCConfig: + r"""Updates the VPCSC Config for the Project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import artifactregistry_v1 + + def sample_update_vpcsc_config(): + # Create a client + client = artifactregistry_v1.ArtifactRegistryClient() + + # Initialize request argument(s) + request = artifactregistry_v1.UpdateVPCSCConfigRequest( + ) + + # Make the request + response = client.update_vpcsc_config(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.artifactregistry_v1.types.UpdateVPCSCConfigRequest, dict]): + The request object. Sets the VPCSC config of the project. + vpcsc_config (google.cloud.artifactregistry_v1.types.VPCSCConfig): + The project config. + This corresponds to the ``vpcsc_config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Field mask to support partial + updates. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.artifactregistry_v1.types.VPCSCConfig: + The Artifact Registry VPC SC config + that apply to a Project. """ # Create or coerce a protobuf request object. - if isinstance(request, dict): - # - The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - request = iam_policy_pb2.GetIamPolicyRequest(**request) - elif not request: - # Null request, just make one. - request = iam_policy_pb2.GetIamPolicyRequest() + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([vpcsc_config, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, gda_vpcsc_config.UpdateVPCSCConfigRequest): + request = gda_vpcsc_config.UpdateVPCSCConfigRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if vpcsc_config is not None: + request.vpcsc_config = vpcsc_config + if update_mask is not None: + request.update_mask = update_mask # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_iam_policy] + rpc = self._transport._wrapped_methods[self._transport.update_vpcsc_config] # Certain fields should be provided within the metadata header; # add these here. 
metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + gapic_v1.routing_header.to_grpc_metadata( + (("vpcsc_config.name", request.vpcsc_config.name),) + ), ) # Validate the universe domain. @@ -4511,16 +5963,17 @@ def sample_get_iam_policy(): # Done; return the response. return response - def test_iam_permissions( + def update_package( self, - request: Optional[Union[iam_policy_pb2.TestIamPermissionsRequest, dict]] = None, + request: Optional[Union[gda_package.UpdatePackageRequest, dict]] = None, *, + package: Optional[gda_package.Package] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Tests if the caller has a list of permissions on a - resource. + ) -> gda_package.Package: + r"""Updates a package. .. code-block:: python @@ -4532,27 +5985,39 @@ def test_iam_permissions( # client as shown in: # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import artifactregistry_v1 - from google.iam.v1 import iam_policy_pb2 # type: ignore - def sample_test_iam_permissions(): + def sample_update_package(): # Create a client client = artifactregistry_v1.ArtifactRegistryClient() # Initialize request argument(s) - request = iam_policy_pb2.TestIamPermissionsRequest( - resource="resource_value", - permissions=['permissions_value1', 'permissions_value2'], + request = artifactregistry_v1.UpdatePackageRequest( ) # Make the request - response = client.test_iam_permissions(request=request) + response = client.update_package(request=request) # Handle the response print(response) Args: - request (Union[google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest, dict]): - The request object. Request message for ``TestIamPermissions`` method. 
+ request (Union[google.cloud.artifactregistry_v1.types.UpdatePackageRequest, dict]): + The request object. The request to update a package. + package (google.cloud.artifactregistry_v1.types.Package): + The package that replaces the + resource on the server. + + This corresponds to the ``package`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + The update mask applies to the resource. For the + ``FieldMask`` definition, see + https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -4560,26 +6025,42 @@ def sample_test_iam_permissions(): sent along with the request as metadata. Returns: - google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse: - Response message for TestIamPermissions method. + google.cloud.artifactregistry_v1.types.Package: + Packages are named collections of + versions. + """ # Create or coerce a protobuf request object. - if isinstance(request, dict): - # - The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - request = iam_policy_pb2.TestIamPermissionsRequest(**request) - elif not request: - # Null request, just make one. - request = iam_policy_pb2.TestIamPermissionsRequest() + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([package, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, gda_package.UpdatePackageRequest): + request = gda_package.UpdatePackageRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if package is not None: + request.package = package + if update_mask is not None: + request.update_mask = update_mask # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.test_iam_permissions] + rpc = self._transport._wrapped_methods[self._transport.update_package] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + gapic_v1.routing_header.to_grpc_metadata( + (("package.name", request.package.name),) + ), ) # Validate the universe domain. @@ -4596,16 +6077,16 @@ def sample_test_iam_permissions(): # Done; return the response. return response - def get_project_settings( + def list_attachments( self, - request: Optional[Union[settings.GetProjectSettingsRequest, dict]] = None, + request: Optional[Union[attachment.ListAttachmentsRequest, dict]] = None, *, - name: Optional[str] = None, + parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> settings.ProjectSettings: - r"""Retrieves the Settings for the Project. + ) -> pagers.ListAttachmentsPager: + r"""Lists attachments. .. 
code-block:: python @@ -4618,30 +6099,31 @@ def get_project_settings( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import artifactregistry_v1 - def sample_get_project_settings(): + def sample_list_attachments(): # Create a client client = artifactregistry_v1.ArtifactRegistryClient() # Initialize request argument(s) - request = artifactregistry_v1.GetProjectSettingsRequest( - name="name_value", + request = artifactregistry_v1.ListAttachmentsRequest( + parent="parent_value", ) # Make the request - response = client.get_project_settings(request=request) + page_result = client.list_attachments(request=request) # Handle the response - print(response) + for response in page_result: + print(response) Args: - request (Union[google.cloud.artifactregistry_v1.types.GetProjectSettingsRequest, dict]): - The request object. Gets the redirection status for a - project. - name (str): - Required. The name of the - projectSettings resource. + request (Union[google.cloud.artifactregistry_v1.types.ListAttachmentsRequest, dict]): + The request object. The request to list attachments. + parent (str): + Required. The name of the parent + resource whose attachments will be + listed. - This corresponds to the ``name`` field + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -4651,15 +6133,18 @@ def sample_get_project_settings(): sent along with the request as metadata. Returns: - google.cloud.artifactregistry_v1.types.ProjectSettings: - The Artifact Registry settings that - apply to a Project. + google.cloud.artifactregistry_v1.services.artifact_registry.pagers.ListAttachmentsPager: + The response from listing + attachments. + Iterating over this object will yield + results and resolve additional pages + automatically. """ # Create or coerce a protobuf request object. 
# - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -4668,21 +6153,21 @@ def sample_get_project_settings(): # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, settings.GetProjectSettingsRequest): - request = settings.GetProjectSettingsRequest(request) + if not isinstance(request, attachment.ListAttachmentsRequest): + request = attachment.ListAttachmentsRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: - request.name = name + if parent is not None: + request.parent = parent # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_project_settings] + rpc = self._transport._wrapped_methods[self._transport.list_attachments] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -4696,20 +6181,30 @@ def sample_get_project_settings(): metadata=metadata, ) + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListAttachmentsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + # Done; return the response. 
return response - def update_project_settings( + def get_attachment( self, - request: Optional[Union[settings.UpdateProjectSettingsRequest, dict]] = None, + request: Optional[Union[attachment.GetAttachmentRequest, dict]] = None, *, - project_settings: Optional[settings.ProjectSettings] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> settings.ProjectSettings: - r"""Updates the Settings for the Project. + ) -> attachment.Attachment: + r"""Gets an attachment. .. code-block:: python @@ -4722,33 +6217,30 @@ def update_project_settings( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import artifactregistry_v1 - def sample_update_project_settings(): + def sample_get_attachment(): # Create a client client = artifactregistry_v1.ArtifactRegistryClient() # Initialize request argument(s) - request = artifactregistry_v1.UpdateProjectSettingsRequest( + request = artifactregistry_v1.GetAttachmentRequest( + name="name_value", ) # Make the request - response = client.update_project_settings(request=request) + response = client.get_attachment(request=request) # Handle the response print(response) Args: - request (Union[google.cloud.artifactregistry_v1.types.UpdateProjectSettingsRequest, dict]): - The request object. Sets the settings of the project. - project_settings (google.cloud.artifactregistry_v1.types.ProjectSettings): - The project settings. - This corresponds to the ``project_settings`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Field mask to support partial - updates. + request (Union[google.cloud.artifactregistry_v1.types.GetAttachmentRequest, dict]): + The request object. 
The request to retrieve an + attachment. + name (str): + Required. The name of the attachment + to retrieve. - This corresponds to the ``update_mask`` field + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -4758,15 +6250,18 @@ def sample_update_project_settings(): sent along with the request as metadata. Returns: - google.cloud.artifactregistry_v1.types.ProjectSettings: - The Artifact Registry settings that - apply to a Project. + google.cloud.artifactregistry_v1.types.Attachment: + An Attachment refers to additional + metadata that can be attached to + artifacts in Artifact Registry. An + attachment consists of one or more + files. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([project_settings, update_mask]) + has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -4775,25 +6270,21 @@ def sample_update_project_settings(): # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, settings.UpdateProjectSettingsRequest): - request = settings.UpdateProjectSettingsRequest(request) + if not isinstance(request, attachment.GetAttachmentRequest): + request = attachment.GetAttachmentRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if project_settings is not None: - request.project_settings = project_settings - if update_mask is not None: - request.update_mask = update_mask + if name is not None: + request.name = name # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = self._transport._wrapped_methods[self._transport.update_project_settings] + rpc = self._transport._wrapped_methods[self._transport.get_attachment] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("project_settings.name", request.project_settings.name),) - ), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -4810,16 +6301,20 @@ def sample_update_project_settings(): # Done; return the response. return response - def get_vpcsc_config( + def create_attachment( self, - request: Optional[Union[vpcsc_config.GetVPCSCConfigRequest, dict]] = None, + request: Optional[Union[gda_attachment.CreateAttachmentRequest, dict]] = None, *, - name: Optional[str] = None, + parent: Optional[str] = None, + attachment: Optional[gda_attachment.Attachment] = None, + attachment_id: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> vpcsc_config.VPCSCConfig: - r"""Retrieves the VPCSC Config for the Project. + ) -> operation.Operation: + r"""Creates an attachment. The returned Operation will + finish once the attachment has been created. Its + response will be the created attachment. .. 
code-block:: python @@ -4832,29 +6327,55 @@ def get_vpcsc_config( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import artifactregistry_v1 - def sample_get_vpcsc_config(): + def sample_create_attachment(): # Create a client client = artifactregistry_v1.ArtifactRegistryClient() # Initialize request argument(s) - request = artifactregistry_v1.GetVPCSCConfigRequest( - name="name_value", + attachment = artifactregistry_v1.Attachment() + attachment.target = "target_value" + attachment.files = ['files_value1', 'files_value2'] + + request = artifactregistry_v1.CreateAttachmentRequest( + parent="parent_value", + attachment_id="attachment_id_value", + attachment=attachment, ) # Make the request - response = client.get_vpcsc_config(request=request) + operation = client.create_attachment(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() # Handle the response print(response) Args: - request (Union[google.cloud.artifactregistry_v1.types.GetVPCSCConfigRequest, dict]): - The request object. Gets the VPC SC config for a project. - name (str): - Required. The name of the VPCSCConfig - resource. + request (Union[google.cloud.artifactregistry_v1.types.CreateAttachmentRequest, dict]): + The request object. The request to create a new + attachment. + parent (str): + Required. The name of the parent + resource where the attachment will be + created. - This corresponds to the ``name`` field + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + attachment (google.cloud.artifactregistry_v1.types.Attachment): + Required. The attachment to be + created. + + This corresponds to the ``attachment`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + attachment_id (str): + Required. The attachment id to use + for this attachment. 
+ + This corresponds to the ``attachment_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -4864,15 +6385,18 @@ def sample_get_vpcsc_config(): sent along with the request as metadata. Returns: - google.cloud.artifactregistry_v1.types.VPCSCConfig: - The Artifact Registry VPC SC config - that apply to a Project. + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.artifactregistry_v1.types.Attachment` An Attachment refers to additional metadata that can be attached to + artifacts in Artifact Registry. An attachment + consists of one or more files. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + has_flattened_params = any([parent, attachment, attachment_id]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -4881,21 +6405,25 @@ def sample_get_vpcsc_config(): # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, vpcsc_config.GetVPCSCConfigRequest): - request = vpcsc_config.GetVPCSCConfigRequest(request) + if not isinstance(request, gda_attachment.CreateAttachmentRequest): + request = gda_attachment.CreateAttachmentRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. 
- if name is not None: - request.name = name + if parent is not None: + request.parent = parent + if attachment is not None: + request.attachment = attachment + if attachment_id is not None: + request.attachment_id = attachment_id # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_vpcsc_config] + rpc = self._transport._wrapped_methods[self._transport.create_attachment] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -4909,22 +6437,29 @@ def sample_get_vpcsc_config(): metadata=metadata, ) + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + gda_attachment.Attachment, + metadata_type=service.OperationMetadata, + ) + # Done; return the response. return response - def update_vpcsc_config( + def delete_attachment( self, - request: Optional[ - Union[gda_vpcsc_config.UpdateVPCSCConfigRequest, dict] - ] = None, + request: Optional[Union[attachment.DeleteAttachmentRequest, dict]] = None, *, - vpcsc_config: Optional[gda_vpcsc_config.VPCSCConfig] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> gda_vpcsc_config.VPCSCConfig: - r"""Updates the VPCSC Config for the Project. + ) -> operation.Operation: + r"""Deletes an attachment. The returned Operation will finish once + the attachments has been deleted. It will not have any Operation + metadata and will return a ``google.protobuf.Empty`` response. .. 
code-block:: python @@ -4937,33 +6472,33 @@ def update_vpcsc_config( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import artifactregistry_v1 - def sample_update_vpcsc_config(): + def sample_delete_attachment(): # Create a client client = artifactregistry_v1.ArtifactRegistryClient() # Initialize request argument(s) - request = artifactregistry_v1.UpdateVPCSCConfigRequest( + request = artifactregistry_v1.DeleteAttachmentRequest( + name="name_value", ) # Make the request - response = client.update_vpcsc_config(request=request) + operation = client.delete_attachment(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() # Handle the response print(response) Args: - request (Union[google.cloud.artifactregistry_v1.types.UpdateVPCSCConfigRequest, dict]): - The request object. Sets the VPCSC config of the project. - vpcsc_config (google.cloud.artifactregistry_v1.types.VPCSCConfig): - The project config. - This corresponds to the ``vpcsc_config`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Field mask to support partial - updates. + request (Union[google.cloud.artifactregistry_v1.types.DeleteAttachmentRequest, dict]): + The request object. The request to delete an attachment. + name (str): + Required. The name of the attachment + to delete. - This corresponds to the ``update_mask`` field + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -4973,15 +6508,25 @@ def sample_update_vpcsc_config(): sent along with the request as metadata. Returns: - google.cloud.artifactregistry_v1.types.VPCSCConfig: - The Artifact Registry VPC SC config - that apply to a Project. 
+ google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([vpcsc_config, update_mask]) + has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -4990,25 +6535,21 @@ def sample_update_vpcsc_config(): # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, gda_vpcsc_config.UpdateVPCSCConfigRequest): - request = gda_vpcsc_config.UpdateVPCSCConfigRequest(request) + if not isinstance(request, attachment.DeleteAttachmentRequest): + request = attachment.DeleteAttachmentRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if vpcsc_config is not None: - request.vpcsc_config = vpcsc_config - if update_mask is not None: - request.update_mask = update_mask + if name is not None: + request.name = name # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_vpcsc_config] + rpc = self._transport._wrapped_methods[self._transport.delete_attachment] # Certain fields should be provided within the metadata header; # add these here. 
metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("vpcsc_config.name", request.vpcsc_config.name),) - ), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -5022,6 +6563,14 @@ def sample_update_vpcsc_config(): metadata=metadata, ) + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=service.OperationMetadata, + ) + # Done; return the response. return response diff --git a/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/services/artifact_registry/pagers.py b/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/services/artifact_registry/pagers.py index c8506d599e30..72859ed617b5 100644 --- a/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/services/artifact_registry/pagers.py +++ b/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/services/artifact_registry/pagers.py @@ -40,9 +40,11 @@ from google.cloud.artifactregistry_v1.types import ( artifact, + attachment, file, package, repository, + rule, tag, version, ) @@ -1414,3 +1416,307 @@ async def async_generator(): def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListRulesPager: + """A pager for iterating through ``list_rules`` requests. + + This class thinly wraps an initial + :class:`google.cloud.artifactregistry_v1.types.ListRulesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``rules`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListRules`` requests and continue to iterate + through the ``rules`` field on the + corresponding responses. + + All the usual :class:`google.cloud.artifactregistry_v1.types.ListRulesResponse` + attributes are available on the pager. 
If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., rule.ListRulesResponse], + request: rule.ListRulesRequest, + response: rule.ListRulesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.artifactregistry_v1.types.ListRulesRequest): + The initial request object. + response (google.cloud.artifactregistry_v1.types.ListRulesResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = rule.ListRulesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[rule.ListRulesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[rule.Rule]: + for page in self.pages: + yield from page.rules + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListRulesAsyncPager: + """A pager for iterating through ``list_rules`` requests. 
+ + This class thinly wraps an initial + :class:`google.cloud.artifactregistry_v1.types.ListRulesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``rules`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListRules`` requests and continue to iterate + through the ``rules`` field on the + corresponding responses. + + All the usual :class:`google.cloud.artifactregistry_v1.types.ListRulesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[rule.ListRulesResponse]], + request: rule.ListRulesRequest, + response: rule.ListRulesResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.artifactregistry_v1.types.ListRulesRequest): + The initial request object. + response (google.cloud.artifactregistry_v1.types.ListRulesResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = rule.ListRulesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[rule.ListRulesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[rule.Rule]: + async def async_generator(): + async for page in self.pages: + for response in page.rules: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListAttachmentsPager: + """A pager for iterating through ``list_attachments`` requests. + + This class thinly wraps an initial + :class:`google.cloud.artifactregistry_v1.types.ListAttachmentsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``attachments`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListAttachments`` requests and continue to iterate + through the ``attachments`` field on the + corresponding responses. + + All the usual :class:`google.cloud.artifactregistry_v1.types.ListAttachmentsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + + def __init__( + self, + method: Callable[..., attachment.ListAttachmentsResponse], + request: attachment.ListAttachmentsRequest, + response: attachment.ListAttachmentsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.artifactregistry_v1.types.ListAttachmentsRequest): + The initial request object. + response (google.cloud.artifactregistry_v1.types.ListAttachmentsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = attachment.ListAttachmentsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[attachment.ListAttachmentsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[attachment.Attachment]: + for page in self.pages: + yield from page.attachments + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListAttachmentsAsyncPager: + """A pager for iterating through ``list_attachments`` requests. 
+ + This class thinly wraps an initial + :class:`google.cloud.artifactregistry_v1.types.ListAttachmentsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``attachments`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListAttachments`` requests and continue to iterate + through the ``attachments`` field on the + corresponding responses. + + All the usual :class:`google.cloud.artifactregistry_v1.types.ListAttachmentsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[attachment.ListAttachmentsResponse]], + request: attachment.ListAttachmentsRequest, + response: attachment.ListAttachmentsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.artifactregistry_v1.types.ListAttachmentsRequest): + The initial request object. + response (google.cloud.artifactregistry_v1.types.ListAttachmentsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = attachment.ListAttachmentsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[attachment.ListAttachmentsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[attachment.Attachment]: + async def async_generator(): + async for page in self.pages: + for response in page.attachments: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/services/artifact_registry/transports/base.py b/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/services/artifact_registry/transports/base.py index e7d8a85ab5ce..9bab91bc8c57 100644 --- a/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/services/artifact_registry/transports/base.py +++ b/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/services/artifact_registry/transports/base.py @@ -30,14 +30,23 @@ from google.protobuf import empty_pb2 # type: ignore from google.cloud.artifactregistry_v1 import gapic_version as package_version -from google.cloud.artifactregistry_v1.types import apt_artifact, artifact, file, package from google.cloud.artifactregistry_v1.types import vpcsc_config as gda_vpcsc_config +from google.cloud.artifactregistry_v1.types import apt_artifact, artifact +from google.cloud.artifactregistry_v1.types import attachment +from 
google.cloud.artifactregistry_v1.types import attachment as gda_attachment +from google.cloud.artifactregistry_v1.types import file +from google.cloud.artifactregistry_v1.types import file as gda_file +from google.cloud.artifactregistry_v1.types import package +from google.cloud.artifactregistry_v1.types import package as gda_package from google.cloud.artifactregistry_v1.types import repository from google.cloud.artifactregistry_v1.types import repository as gda_repository +from google.cloud.artifactregistry_v1.types import rule +from google.cloud.artifactregistry_v1.types import rule as gda_rule from google.cloud.artifactregistry_v1.types import settings from google.cloud.artifactregistry_v1.types import tag from google.cloud.artifactregistry_v1.types import tag as gda_tag from google.cloud.artifactregistry_v1.types import version +from google.cloud.artifactregistry_v1.types import version as gda_version from google.cloud.artifactregistry_v1.types import vpcsc_config from google.cloud.artifactregistry_v1.types import yum_artifact @@ -255,6 +264,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.update_version: gapic_v1.method.wrap_method( + self.update_version, + default_timeout=None, + client_info=client_info, + ), self.list_files: gapic_v1.method.wrap_method( self.list_files, default_timeout=None, @@ -265,6 +279,16 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.delete_file: gapic_v1.method.wrap_method( + self.delete_file, + default_timeout=None, + client_info=client_info, + ), + self.update_file: gapic_v1.method.wrap_method( + self.update_file, + default_timeout=None, + client_info=client_info, + ), self.list_tags: gapic_v1.method.wrap_method( self.list_tags, default_timeout=None, @@ -290,6 +314,31 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.create_rule: gapic_v1.method.wrap_method( 
+ self.create_rule, + default_timeout=None, + client_info=client_info, + ), + self.list_rules: gapic_v1.method.wrap_method( + self.list_rules, + default_timeout=None, + client_info=client_info, + ), + self.get_rule: gapic_v1.method.wrap_method( + self.get_rule, + default_timeout=None, + client_info=client_info, + ), + self.update_rule: gapic_v1.method.wrap_method( + self.update_rule, + default_timeout=None, + client_info=client_info, + ), + self.delete_rule: gapic_v1.method.wrap_method( + self.delete_rule, + default_timeout=None, + client_info=client_info, + ), self.set_iam_policy: gapic_v1.method.wrap_method( self.set_iam_policy, default_timeout=None, @@ -325,6 +374,31 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.update_package: gapic_v1.method.wrap_method( + self.update_package, + default_timeout=None, + client_info=client_info, + ), + self.list_attachments: gapic_v1.method.wrap_method( + self.list_attachments, + default_timeout=None, + client_info=client_info, + ), + self.get_attachment: gapic_v1.method.wrap_method( + self.get_attachment, + default_timeout=None, + client_info=client_info, + ), + self.create_attachment: gapic_v1.method.wrap_method( + self.create_attachment, + default_timeout=None, + client_info=client_info, + ), + self.delete_attachment: gapic_v1.method.wrap_method( + self.delete_attachment, + default_timeout=None, + client_info=client_info, + ), self.get_location: gapic_v1.method.wrap_method( self.get_location, default_timeout=None, @@ -567,6 +641,15 @@ def batch_delete_versions( ]: raise NotImplementedError() + @property + def update_version( + self, + ) -> Callable[ + [gda_version.UpdateVersionRequest], + Union[gda_version.Version, Awaitable[gda_version.Version]], + ]: + raise NotImplementedError() + @property def list_files( self, @@ -582,6 +665,23 @@ def get_file( ) -> Callable[[file.GetFileRequest], Union[file.File, Awaitable[file.File]]]: raise NotImplementedError() + @property 
+ def delete_file( + self, + ) -> Callable[ + [file.DeleteFileRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def update_file( + self, + ) -> Callable[ + [gda_file.UpdateFileRequest], Union[gda_file.File, Awaitable[gda_file.File]] + ]: + raise NotImplementedError() + @property def list_tags( self, @@ -621,6 +721,45 @@ def delete_tag( ]: raise NotImplementedError() + @property + def create_rule( + self, + ) -> Callable[ + [gda_rule.CreateRuleRequest], Union[gda_rule.Rule, Awaitable[gda_rule.Rule]] + ]: + raise NotImplementedError() + + @property + def list_rules( + self, + ) -> Callable[ + [rule.ListRulesRequest], + Union[rule.ListRulesResponse, Awaitable[rule.ListRulesResponse]], + ]: + raise NotImplementedError() + + @property + def get_rule( + self, + ) -> Callable[[rule.GetRuleRequest], Union[rule.Rule, Awaitable[rule.Rule]]]: + raise NotImplementedError() + + @property + def update_rule( + self, + ) -> Callable[ + [gda_rule.UpdateRuleRequest], Union[gda_rule.Rule, Awaitable[gda_rule.Rule]] + ]: + raise NotImplementedError() + + @property + def delete_rule( + self, + ) -> Callable[ + [rule.DeleteRuleRequest], Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]] + ]: + raise NotImplementedError() + @property def set_iam_policy( self, @@ -687,6 +826,54 @@ def update_vpcsc_config( ]: raise NotImplementedError() + @property + def update_package( + self, + ) -> Callable[ + [gda_package.UpdatePackageRequest], + Union[gda_package.Package, Awaitable[gda_package.Package]], + ]: + raise NotImplementedError() + + @property + def list_attachments( + self, + ) -> Callable[ + [attachment.ListAttachmentsRequest], + Union[ + attachment.ListAttachmentsResponse, + Awaitable[attachment.ListAttachmentsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_attachment( + self, + ) -> Callable[ + [attachment.GetAttachmentRequest], + Union[attachment.Attachment, 
Awaitable[attachment.Attachment]], + ]: + raise NotImplementedError() + + @property + def create_attachment( + self, + ) -> Callable[ + [gda_attachment.CreateAttachmentRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def delete_attachment( + self, + ) -> Callable[ + [attachment.DeleteAttachmentRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + @property def get_operation( self, diff --git a/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/services/artifact_registry/transports/grpc.py b/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/services/artifact_registry/transports/grpc.py index a7029edd8511..eed5d0fde726 100644 --- a/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/services/artifact_registry/transports/grpc.py +++ b/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/services/artifact_registry/transports/grpc.py @@ -27,14 +27,23 @@ from google.protobuf import empty_pb2 # type: ignore import grpc # type: ignore -from google.cloud.artifactregistry_v1.types import apt_artifact, artifact, file, package from google.cloud.artifactregistry_v1.types import vpcsc_config as gda_vpcsc_config +from google.cloud.artifactregistry_v1.types import apt_artifact, artifact +from google.cloud.artifactregistry_v1.types import attachment +from google.cloud.artifactregistry_v1.types import attachment as gda_attachment +from google.cloud.artifactregistry_v1.types import file +from google.cloud.artifactregistry_v1.types import file as gda_file +from google.cloud.artifactregistry_v1.types import package +from google.cloud.artifactregistry_v1.types import package as gda_package from google.cloud.artifactregistry_v1.types import repository from google.cloud.artifactregistry_v1.types import repository as gda_repository +from 
google.cloud.artifactregistry_v1.types import rule +from google.cloud.artifactregistry_v1.types import rule as gda_rule from google.cloud.artifactregistry_v1.types import settings from google.cloud.artifactregistry_v1.types import tag from google.cloud.artifactregistry_v1.types import tag as gda_tag from google.cloud.artifactregistry_v1.types import version +from google.cloud.artifactregistry_v1.types import version as gda_version from google.cloud.artifactregistry_v1.types import vpcsc_config from google.cloud.artifactregistry_v1.types import yum_artifact @@ -870,6 +879,32 @@ def batch_delete_versions( ) return self._stubs["batch_delete_versions"] + @property + def update_version( + self, + ) -> Callable[[gda_version.UpdateVersionRequest], gda_version.Version]: + r"""Return a callable for the update version method over gRPC. + + Updates a version. + + Returns: + Callable[[~.UpdateVersionRequest], + ~.Version]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_version" not in self._stubs: + self._stubs["update_version"] = self.grpc_channel.unary_unary( + "/google.devtools.artifactregistry.v1.ArtifactRegistry/UpdateVersion", + request_serializer=gda_version.UpdateVersionRequest.serialize, + response_deserializer=gda_version.Version.deserialize, + ) + return self._stubs["update_version"] + @property def list_files(self) -> Callable[[file.ListFilesRequest], file.ListFilesResponse]: r"""Return a callable for the list files method over gRPC. @@ -918,6 +953,58 @@ def get_file(self) -> Callable[[file.GetFileRequest], file.File]: ) return self._stubs["get_file"] + @property + def delete_file( + self, + ) -> Callable[[file.DeleteFileRequest], operations_pb2.Operation]: + r"""Return a callable for the delete file method over gRPC. 
+ + Deletes a file and all of its content. It is only + allowed on generic repositories. The returned operation + will complete once the file has been deleted. + + Returns: + Callable[[~.DeleteFileRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_file" not in self._stubs: + self._stubs["delete_file"] = self.grpc_channel.unary_unary( + "/google.devtools.artifactregistry.v1.ArtifactRegistry/DeleteFile", + request_serializer=file.DeleteFileRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_file"] + + @property + def update_file(self) -> Callable[[gda_file.UpdateFileRequest], gda_file.File]: + r"""Return a callable for the update file method over gRPC. + + Updates a file. + + Returns: + Callable[[~.UpdateFileRequest], + ~.File]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_file" not in self._stubs: + self._stubs["update_file"] = self.grpc_channel.unary_unary( + "/google.devtools.artifactregistry.v1.ArtifactRegistry/UpdateFile", + request_serializer=gda_file.UpdateFileRequest.serialize, + response_deserializer=gda_file.File.deserialize, + ) + return self._stubs["update_file"] + @property def list_tags(self) -> Callable[[tag.ListTagsRequest], tag.ListTagsResponse]: r"""Return a callable for the list tags method over gRPC. 
@@ -1038,6 +1125,126 @@ def delete_tag(self) -> Callable[[tag.DeleteTagRequest], empty_pb2.Empty]: ) return self._stubs["delete_tag"] + @property + def create_rule(self) -> Callable[[gda_rule.CreateRuleRequest], gda_rule.Rule]: + r"""Return a callable for the create rule method over gRPC. + + Creates a rule. + + Returns: + Callable[[~.CreateRuleRequest], + ~.Rule]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_rule" not in self._stubs: + self._stubs["create_rule"] = self.grpc_channel.unary_unary( + "/google.devtools.artifactregistry.v1.ArtifactRegistry/CreateRule", + request_serializer=gda_rule.CreateRuleRequest.serialize, + response_deserializer=gda_rule.Rule.deserialize, + ) + return self._stubs["create_rule"] + + @property + def list_rules(self) -> Callable[[rule.ListRulesRequest], rule.ListRulesResponse]: + r"""Return a callable for the list rules method over gRPC. + + Lists rules. + + Returns: + Callable[[~.ListRulesRequest], + ~.ListRulesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_rules" not in self._stubs: + self._stubs["list_rules"] = self.grpc_channel.unary_unary( + "/google.devtools.artifactregistry.v1.ArtifactRegistry/ListRules", + request_serializer=rule.ListRulesRequest.serialize, + response_deserializer=rule.ListRulesResponse.deserialize, + ) + return self._stubs["list_rules"] + + @property + def get_rule(self) -> Callable[[rule.GetRuleRequest], rule.Rule]: + r"""Return a callable for the get rule method over gRPC. + + Gets a rule. 
+ + Returns: + Callable[[~.GetRuleRequest], + ~.Rule]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_rule" not in self._stubs: + self._stubs["get_rule"] = self.grpc_channel.unary_unary( + "/google.devtools.artifactregistry.v1.ArtifactRegistry/GetRule", + request_serializer=rule.GetRuleRequest.serialize, + response_deserializer=rule.Rule.deserialize, + ) + return self._stubs["get_rule"] + + @property + def update_rule(self) -> Callable[[gda_rule.UpdateRuleRequest], gda_rule.Rule]: + r"""Return a callable for the update rule method over gRPC. + + Updates a rule. + + Returns: + Callable[[~.UpdateRuleRequest], + ~.Rule]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_rule" not in self._stubs: + self._stubs["update_rule"] = self.grpc_channel.unary_unary( + "/google.devtools.artifactregistry.v1.ArtifactRegistry/UpdateRule", + request_serializer=gda_rule.UpdateRuleRequest.serialize, + response_deserializer=gda_rule.Rule.deserialize, + ) + return self._stubs["update_rule"] + + @property + def delete_rule(self) -> Callable[[rule.DeleteRuleRequest], empty_pb2.Empty]: + r"""Return a callable for the delete rule method over gRPC. + + Deletes a rule. + + Returns: + Callable[[~.DeleteRuleRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_rule" not in self._stubs: + self._stubs["delete_rule"] = self.grpc_channel.unary_unary( + "/google.devtools.artifactregistry.v1.ArtifactRegistry/DeleteRule", + request_serializer=rule.DeleteRuleRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_rule"] + @property def set_iam_policy( self, @@ -1226,6 +1433,142 @@ def update_vpcsc_config( ) return self._stubs["update_vpcsc_config"] + @property + def update_package( + self, + ) -> Callable[[gda_package.UpdatePackageRequest], gda_package.Package]: + r"""Return a callable for the update package method over gRPC. + + Updates a package. + + Returns: + Callable[[~.UpdatePackageRequest], + ~.Package]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_package" not in self._stubs: + self._stubs["update_package"] = self.grpc_channel.unary_unary( + "/google.devtools.artifactregistry.v1.ArtifactRegistry/UpdatePackage", + request_serializer=gda_package.UpdatePackageRequest.serialize, + response_deserializer=gda_package.Package.deserialize, + ) + return self._stubs["update_package"] + + @property + def list_attachments( + self, + ) -> Callable[ + [attachment.ListAttachmentsRequest], attachment.ListAttachmentsResponse + ]: + r"""Return a callable for the list attachments method over gRPC. + + Lists attachments. + + Returns: + Callable[[~.ListAttachmentsRequest], + ~.ListAttachmentsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_attachments" not in self._stubs: + self._stubs["list_attachments"] = self.grpc_channel.unary_unary( + "/google.devtools.artifactregistry.v1.ArtifactRegistry/ListAttachments", + request_serializer=attachment.ListAttachmentsRequest.serialize, + response_deserializer=attachment.ListAttachmentsResponse.deserialize, + ) + return self._stubs["list_attachments"] + + @property + def get_attachment( + self, + ) -> Callable[[attachment.GetAttachmentRequest], attachment.Attachment]: + r"""Return a callable for the get attachment method over gRPC. + + Gets an attachment. + + Returns: + Callable[[~.GetAttachmentRequest], + ~.Attachment]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_attachment" not in self._stubs: + self._stubs["get_attachment"] = self.grpc_channel.unary_unary( + "/google.devtools.artifactregistry.v1.ArtifactRegistry/GetAttachment", + request_serializer=attachment.GetAttachmentRequest.serialize, + response_deserializer=attachment.Attachment.deserialize, + ) + return self._stubs["get_attachment"] + + @property + def create_attachment( + self, + ) -> Callable[[gda_attachment.CreateAttachmentRequest], operations_pb2.Operation]: + r"""Return a callable for the create attachment method over gRPC. + + Creates an attachment. The returned Operation will + finish once the attachment has been created. Its + response will be the created attachment. + + Returns: + Callable[[~.CreateAttachmentRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "create_attachment" not in self._stubs: + self._stubs["create_attachment"] = self.grpc_channel.unary_unary( + "/google.devtools.artifactregistry.v1.ArtifactRegistry/CreateAttachment", + request_serializer=gda_attachment.CreateAttachmentRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_attachment"] + + @property + def delete_attachment( + self, + ) -> Callable[[attachment.DeleteAttachmentRequest], operations_pb2.Operation]: + r"""Return a callable for the delete attachment method over gRPC. + + Deletes an attachment. The returned Operation will finish once + the attachments has been deleted. It will not have any Operation + metadata and will return a ``google.protobuf.Empty`` response. + + Returns: + Callable[[~.DeleteAttachmentRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_attachment" not in self._stubs: + self._stubs["delete_attachment"] = self.grpc_channel.unary_unary( + "/google.devtools.artifactregistry.v1.ArtifactRegistry/DeleteAttachment", + request_serializer=attachment.DeleteAttachmentRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_attachment"] + def close(self): self.grpc_channel.close() diff --git a/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/services/artifact_registry/transports/grpc_asyncio.py b/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/services/artifact_registry/transports/grpc_asyncio.py index 5820af0852fd..0cb0f05886c1 100644 --- a/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/services/artifact_registry/transports/grpc_asyncio.py +++ b/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/services/artifact_registry/transports/grpc_asyncio.py @@ -30,14 +30,23 @@ import grpc # type: ignore from grpc.experimental import aio # type: ignore -from google.cloud.artifactregistry_v1.types import apt_artifact, artifact, file, package from google.cloud.artifactregistry_v1.types import vpcsc_config as gda_vpcsc_config +from google.cloud.artifactregistry_v1.types import apt_artifact, artifact +from google.cloud.artifactregistry_v1.types import attachment +from google.cloud.artifactregistry_v1.types import attachment as gda_attachment +from google.cloud.artifactregistry_v1.types import file +from google.cloud.artifactregistry_v1.types import file as gda_file +from google.cloud.artifactregistry_v1.types import package +from google.cloud.artifactregistry_v1.types import package as gda_package from google.cloud.artifactregistry_v1.types import repository from google.cloud.artifactregistry_v1.types import repository as gda_repository +from google.cloud.artifactregistry_v1.types import rule +from google.cloud.artifactregistry_v1.types import rule as 
gda_rule from google.cloud.artifactregistry_v1.types import settings from google.cloud.artifactregistry_v1.types import tag from google.cloud.artifactregistry_v1.types import tag as gda_tag from google.cloud.artifactregistry_v1.types import version +from google.cloud.artifactregistry_v1.types import version as gda_version from google.cloud.artifactregistry_v1.types import vpcsc_config from google.cloud.artifactregistry_v1.types import yum_artifact @@ -909,6 +918,32 @@ def batch_delete_versions( ) return self._stubs["batch_delete_versions"] + @property + def update_version( + self, + ) -> Callable[[gda_version.UpdateVersionRequest], Awaitable[gda_version.Version]]: + r"""Return a callable for the update version method over gRPC. + + Updates a version. + + Returns: + Callable[[~.UpdateVersionRequest], + Awaitable[~.Version]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_version" not in self._stubs: + self._stubs["update_version"] = self.grpc_channel.unary_unary( + "/google.devtools.artifactregistry.v1.ArtifactRegistry/UpdateVersion", + request_serializer=gda_version.UpdateVersionRequest.serialize, + response_deserializer=gda_version.Version.deserialize, + ) + return self._stubs["update_version"] + @property def list_files( self, @@ -959,6 +994,60 @@ def get_file(self) -> Callable[[file.GetFileRequest], Awaitable[file.File]]: ) return self._stubs["get_file"] + @property + def delete_file( + self, + ) -> Callable[[file.DeleteFileRequest], Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the delete file method over gRPC. + + Deletes a file and all of its content. It is only + allowed on generic repositories. The returned operation + will complete once the file has been deleted. 
+ + Returns: + Callable[[~.DeleteFileRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_file" not in self._stubs: + self._stubs["delete_file"] = self.grpc_channel.unary_unary( + "/google.devtools.artifactregistry.v1.ArtifactRegistry/DeleteFile", + request_serializer=file.DeleteFileRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_file"] + + @property + def update_file( + self, + ) -> Callable[[gda_file.UpdateFileRequest], Awaitable[gda_file.File]]: + r"""Return a callable for the update file method over gRPC. + + Updates a file. + + Returns: + Callable[[~.UpdateFileRequest], + Awaitable[~.File]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_file" not in self._stubs: + self._stubs["update_file"] = self.grpc_channel.unary_unary( + "/google.devtools.artifactregistry.v1.ArtifactRegistry/UpdateFile", + request_serializer=gda_file.UpdateFileRequest.serialize, + response_deserializer=gda_file.File.deserialize, + ) + return self._stubs["update_file"] + @property def list_tags( self, @@ -1087,6 +1176,134 @@ def delete_tag( ) return self._stubs["delete_tag"] + @property + def create_rule( + self, + ) -> Callable[[gda_rule.CreateRuleRequest], Awaitable[gda_rule.Rule]]: + r"""Return a callable for the create rule method over gRPC. + + Creates a rule. + + Returns: + Callable[[~.CreateRuleRequest], + Awaitable[~.Rule]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_rule" not in self._stubs: + self._stubs["create_rule"] = self.grpc_channel.unary_unary( + "/google.devtools.artifactregistry.v1.ArtifactRegistry/CreateRule", + request_serializer=gda_rule.CreateRuleRequest.serialize, + response_deserializer=gda_rule.Rule.deserialize, + ) + return self._stubs["create_rule"] + + @property + def list_rules( + self, + ) -> Callable[[rule.ListRulesRequest], Awaitable[rule.ListRulesResponse]]: + r"""Return a callable for the list rules method over gRPC. + + Lists rules. + + Returns: + Callable[[~.ListRulesRequest], + Awaitable[~.ListRulesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_rules" not in self._stubs: + self._stubs["list_rules"] = self.grpc_channel.unary_unary( + "/google.devtools.artifactregistry.v1.ArtifactRegistry/ListRules", + request_serializer=rule.ListRulesRequest.serialize, + response_deserializer=rule.ListRulesResponse.deserialize, + ) + return self._stubs["list_rules"] + + @property + def get_rule(self) -> Callable[[rule.GetRuleRequest], Awaitable[rule.Rule]]: + r"""Return a callable for the get rule method over gRPC. + + Gets a rule. + + Returns: + Callable[[~.GetRuleRequest], + Awaitable[~.Rule]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_rule" not in self._stubs: + self._stubs["get_rule"] = self.grpc_channel.unary_unary( + "/google.devtools.artifactregistry.v1.ArtifactRegistry/GetRule", + request_serializer=rule.GetRuleRequest.serialize, + response_deserializer=rule.Rule.deserialize, + ) + return self._stubs["get_rule"] + + @property + def update_rule( + self, + ) -> Callable[[gda_rule.UpdateRuleRequest], Awaitable[gda_rule.Rule]]: + r"""Return a callable for the update rule method over gRPC. + + Updates a rule. + + Returns: + Callable[[~.UpdateRuleRequest], + Awaitable[~.Rule]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_rule" not in self._stubs: + self._stubs["update_rule"] = self.grpc_channel.unary_unary( + "/google.devtools.artifactregistry.v1.ArtifactRegistry/UpdateRule", + request_serializer=gda_rule.UpdateRuleRequest.serialize, + response_deserializer=gda_rule.Rule.deserialize, + ) + return self._stubs["update_rule"] + + @property + def delete_rule( + self, + ) -> Callable[[rule.DeleteRuleRequest], Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete rule method over gRPC. + + Deletes a rule. + + Returns: + Callable[[~.DeleteRuleRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_rule" not in self._stubs: + self._stubs["delete_rule"] = self.grpc_channel.unary_unary( + "/google.devtools.artifactregistry.v1.ArtifactRegistry/DeleteRule", + request_serializer=rule.DeleteRuleRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_rule"] + @property def set_iam_policy( self, @@ -1282,6 +1499,147 @@ def update_vpcsc_config( ) return self._stubs["update_vpcsc_config"] + @property + def update_package( + self, + ) -> Callable[[gda_package.UpdatePackageRequest], Awaitable[gda_package.Package]]: + r"""Return a callable for the update package method over gRPC. + + Updates a package. + + Returns: + Callable[[~.UpdatePackageRequest], + Awaitable[~.Package]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_package" not in self._stubs: + self._stubs["update_package"] = self.grpc_channel.unary_unary( + "/google.devtools.artifactregistry.v1.ArtifactRegistry/UpdatePackage", + request_serializer=gda_package.UpdatePackageRequest.serialize, + response_deserializer=gda_package.Package.deserialize, + ) + return self._stubs["update_package"] + + @property + def list_attachments( + self, + ) -> Callable[ + [attachment.ListAttachmentsRequest], + Awaitable[attachment.ListAttachmentsResponse], + ]: + r"""Return a callable for the list attachments method over gRPC. + + Lists attachments. + + Returns: + Callable[[~.ListAttachmentsRequest], + Awaitable[~.ListAttachmentsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_attachments" not in self._stubs: + self._stubs["list_attachments"] = self.grpc_channel.unary_unary( + "/google.devtools.artifactregistry.v1.ArtifactRegistry/ListAttachments", + request_serializer=attachment.ListAttachmentsRequest.serialize, + response_deserializer=attachment.ListAttachmentsResponse.deserialize, + ) + return self._stubs["list_attachments"] + + @property + def get_attachment( + self, + ) -> Callable[[attachment.GetAttachmentRequest], Awaitable[attachment.Attachment]]: + r"""Return a callable for the get attachment method over gRPC. + + Gets an attachment. + + Returns: + Callable[[~.GetAttachmentRequest], + Awaitable[~.Attachment]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_attachment" not in self._stubs: + self._stubs["get_attachment"] = self.grpc_channel.unary_unary( + "/google.devtools.artifactregistry.v1.ArtifactRegistry/GetAttachment", + request_serializer=attachment.GetAttachmentRequest.serialize, + response_deserializer=attachment.Attachment.deserialize, + ) + return self._stubs["get_attachment"] + + @property + def create_attachment( + self, + ) -> Callable[ + [gda_attachment.CreateAttachmentRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the create attachment method over gRPC. + + Creates an attachment. The returned Operation will + finish once the attachment has been created. Its + response will be the created attachment. + + Returns: + Callable[[~.CreateAttachmentRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_attachment" not in self._stubs: + self._stubs["create_attachment"] = self.grpc_channel.unary_unary( + "/google.devtools.artifactregistry.v1.ArtifactRegistry/CreateAttachment", + request_serializer=gda_attachment.CreateAttachmentRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_attachment"] + + @property + def delete_attachment( + self, + ) -> Callable[ + [attachment.DeleteAttachmentRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the delete attachment method over gRPC. + + Deletes an attachment. The returned Operation will finish once + the attachment has been deleted. It will not have any Operation + metadata and will return a ``google.protobuf.Empty`` response. + + Returns: + Callable[[~.DeleteAttachmentRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each.
+ if "delete_attachment" not in self._stubs: + self._stubs["delete_attachment"] = self.grpc_channel.unary_unary( + "/google.devtools.artifactregistry.v1.ArtifactRegistry/DeleteAttachment", + request_serializer=attachment.DeleteAttachmentRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_attachment"] + def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" self._wrapped_methods = { @@ -1395,6 +1753,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.update_version: self._wrap_method( + self.update_version, + default_timeout=None, + client_info=client_info, + ), self.list_files: self._wrap_method( self.list_files, default_timeout=None, @@ -1405,6 +1768,16 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.delete_file: self._wrap_method( + self.delete_file, + default_timeout=None, + client_info=client_info, + ), + self.update_file: self._wrap_method( + self.update_file, + default_timeout=None, + client_info=client_info, + ), self.list_tags: self._wrap_method( self.list_tags, default_timeout=None, @@ -1430,6 +1803,31 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.create_rule: self._wrap_method( + self.create_rule, + default_timeout=None, + client_info=client_info, + ), + self.list_rules: self._wrap_method( + self.list_rules, + default_timeout=None, + client_info=client_info, + ), + self.get_rule: self._wrap_method( + self.get_rule, + default_timeout=None, + client_info=client_info, + ), + self.update_rule: self._wrap_method( + self.update_rule, + default_timeout=None, + client_info=client_info, + ), + self.delete_rule: self._wrap_method( + self.delete_rule, + default_timeout=None, + client_info=client_info, + ), self.set_iam_policy: self._wrap_method( 
self.set_iam_policy, default_timeout=None, @@ -1465,6 +1863,31 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.update_package: self._wrap_method( + self.update_package, + default_timeout=None, + client_info=client_info, + ), + self.list_attachments: self._wrap_method( + self.list_attachments, + default_timeout=None, + client_info=client_info, + ), + self.get_attachment: self._wrap_method( + self.get_attachment, + default_timeout=None, + client_info=client_info, + ), + self.create_attachment: self._wrap_method( + self.create_attachment, + default_timeout=None, + client_info=client_info, + ), + self.delete_attachment: self._wrap_method( + self.delete_attachment, + default_timeout=None, + client_info=client_info, + ), self.get_location: self._wrap_method( self.get_location, default_timeout=None, diff --git a/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/services/artifact_registry/transports/rest.py b/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/services/artifact_registry/transports/rest.py index 3189527330cd..b7b5058680eb 100644 --- a/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/services/artifact_registry/transports/rest.py +++ b/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/services/artifact_registry/transports/rest.py @@ -32,14 +32,23 @@ from google.protobuf import json_format from requests import __version__ as requests_version -from google.cloud.artifactregistry_v1.types import apt_artifact, artifact, file, package from google.cloud.artifactregistry_v1.types import vpcsc_config as gda_vpcsc_config +from google.cloud.artifactregistry_v1.types import apt_artifact, artifact +from google.cloud.artifactregistry_v1.types import attachment +from google.cloud.artifactregistry_v1.types import attachment as gda_attachment +from google.cloud.artifactregistry_v1.types import file +from 
google.cloud.artifactregistry_v1.types import file as gda_file +from google.cloud.artifactregistry_v1.types import package +from google.cloud.artifactregistry_v1.types import package as gda_package from google.cloud.artifactregistry_v1.types import repository from google.cloud.artifactregistry_v1.types import repository as gda_repository +from google.cloud.artifactregistry_v1.types import rule +from google.cloud.artifactregistry_v1.types import rule as gda_rule from google.cloud.artifactregistry_v1.types import settings from google.cloud.artifactregistry_v1.types import tag from google.cloud.artifactregistry_v1.types import tag as gda_tag from google.cloud.artifactregistry_v1.types import version +from google.cloud.artifactregistry_v1.types import version as gda_version from google.cloud.artifactregistry_v1.types import vpcsc_config from google.cloud.artifactregistry_v1.types import yum_artifact @@ -82,6 +91,14 @@ def post_batch_delete_versions(self, response): logging.log(f"Received response: {response}") return response + def pre_create_attachment(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_attachment(self, response): + logging.log(f"Received response: {response}") + return response + def pre_create_repository(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -90,6 +107,14 @@ def post_create_repository(self, response): logging.log(f"Received response: {response}") return response + def pre_create_rule(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_rule(self, response): + logging.log(f"Received response: {response}") + return response + def pre_create_tag(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -98,6 +123,22 @@ def post_create_tag(self, response): logging.log(f"Received response: {response}") return response + def 
pre_delete_attachment(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_attachment(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_file(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_file(self, response): + logging.log(f"Received response: {response}") + return response + def pre_delete_package(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -114,6 +155,10 @@ def post_delete_repository(self, response): logging.log(f"Received response: {response}") return response + def pre_delete_rule(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + def pre_delete_tag(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -126,6 +171,14 @@ def post_delete_version(self, response): logging.log(f"Received response: {response}") return response + def pre_get_attachment(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_attachment(self, response): + logging.log(f"Received response: {response}") + return response + def pre_get_docker_image(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -198,6 +251,14 @@ def post_get_repository(self, response): logging.log(f"Received response: {response}") return response + def pre_get_rule(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_rule(self, response): + logging.log(f"Received response: {response}") + return response + def pre_get_tag(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -238,6 +299,14 @@ def post_import_yum_artifacts(self, response): logging.log(f"Received response: 
{response}") return response + def pre_list_attachments(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_attachments(self, response): + logging.log(f"Received response: {response}") + return response + def pre_list_docker_images(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -294,6 +363,14 @@ def post_list_repositories(self, response): logging.log(f"Received response: {response}") return response + def pre_list_rules(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_rules(self, response): + logging.log(f"Received response: {response}") + return response + def pre_list_tags(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -326,6 +403,22 @@ def post_test_iam_permissions(self, response): logging.log(f"Received response: {response}") return response + def pre_update_file(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_file(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_package(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_package(self, response): + logging.log(f"Received response: {response}") + return response + def pre_update_project_settings(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -342,6 +435,14 @@ def post_update_repository(self, response): logging.log(f"Received response: {response}") return response + def pre_update_rule(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_rule(self, response): + logging.log(f"Received response: {response}") + return response + def pre_update_tag(self, request, metadata): 
logging.log(f"Received request: {request}") return request, metadata @@ -350,6 +451,14 @@ def post_update_tag(self, response): logging.log(f"Received response: {response}") return response + def pre_update_version(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_version(self, response): + logging.log(f"Received response: {response}") + return response + def pre_update_vpcsc_config(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -387,6 +496,29 @@ def post_batch_delete_versions( """ return response + def pre_create_attachment( + self, + request: gda_attachment.CreateAttachmentRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[gda_attachment.CreateAttachmentRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_attachment + + Override in a subclass to manipulate the request or metadata + before they are sent to the ArtifactRegistry server. + """ + return request, metadata + + def post_create_attachment( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_attachment + + Override in a subclass to manipulate the response + after it is returned by the ArtifactRegistry server but before + it is returned to user code. + """ + return response + def pre_create_repository( self, request: gda_repository.CreateRepositoryRequest, @@ -410,6 +542,25 @@ def post_create_repository( """ return response + def pre_create_rule( + self, request: gda_rule.CreateRuleRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[gda_rule.CreateRuleRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_rule + + Override in a subclass to manipulate the request or metadata + before they are sent to the ArtifactRegistry server. 
+ """ + return request, metadata + + def post_create_rule(self, response: gda_rule.Rule) -> gda_rule.Rule: + """Post-rpc interceptor for create_rule + + Override in a subclass to manipulate the response + after it is returned by the ArtifactRegistry server but before + it is returned to user code. + """ + return response + def pre_create_tag( self, request: gda_tag.CreateTagRequest, metadata: Sequence[Tuple[str, str]] ) -> Tuple[gda_tag.CreateTagRequest, Sequence[Tuple[str, str]]]: @@ -429,6 +580,50 @@ def post_create_tag(self, response: gda_tag.Tag) -> gda_tag.Tag: """ return response + def pre_delete_attachment( + self, + request: attachment.DeleteAttachmentRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[attachment.DeleteAttachmentRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_attachment + + Override in a subclass to manipulate the request or metadata + before they are sent to the ArtifactRegistry server. + """ + return request, metadata + + def post_delete_attachment( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_attachment + + Override in a subclass to manipulate the response + after it is returned by the ArtifactRegistry server but before + it is returned to user code. + """ + return response + + def pre_delete_file( + self, request: file.DeleteFileRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[file.DeleteFileRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_file + + Override in a subclass to manipulate the request or metadata + before they are sent to the ArtifactRegistry server. + """ + return request, metadata + + def post_delete_file( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_file + + Override in a subclass to manipulate the response + after it is returned by the ArtifactRegistry server but before + it is returned to user code. 
+ """ + return response + def pre_delete_package( self, request: package.DeletePackageRequest, metadata: Sequence[Tuple[str, str]] ) -> Tuple[package.DeletePackageRequest, Sequence[Tuple[str, str]]]: @@ -473,6 +668,16 @@ def post_delete_repository( """ return response + def pre_delete_rule( + self, request: rule.DeleteRuleRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[rule.DeleteRuleRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_rule + + Override in a subclass to manipulate the request or metadata + before they are sent to the ArtifactRegistry server. + """ + return request, metadata + def pre_delete_tag( self, request: tag.DeleteTagRequest, metadata: Sequence[Tuple[str, str]] ) -> Tuple[tag.DeleteTagRequest, Sequence[Tuple[str, str]]]: @@ -504,6 +709,29 @@ def post_delete_version( """ return response + def pre_get_attachment( + self, + request: attachment.GetAttachmentRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[attachment.GetAttachmentRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_attachment + + Override in a subclass to manipulate the request or metadata + before they are sent to the ArtifactRegistry server. + """ + return request, metadata + + def post_get_attachment( + self, response: attachment.Attachment + ) -> attachment.Attachment: + """Post-rpc interceptor for get_attachment + + Override in a subclass to manipulate the response + after it is returned by the ArtifactRegistry server but before + it is returned to user code. 
+ """ + return response + def pre_get_docker_image( self, request: artifact.GetDockerImageRequest, @@ -701,6 +929,25 @@ def post_get_repository( """ return response + def pre_get_rule( + self, request: rule.GetRuleRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[rule.GetRuleRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_rule + + Override in a subclass to manipulate the request or metadata + before they are sent to the ArtifactRegistry server. + """ + return request, metadata + + def post_get_rule(self, response: rule.Rule) -> rule.Rule: + """Post-rpc interceptor for get_rule + + Override in a subclass to manipulate the response + after it is returned by the ArtifactRegistry server but before + it is returned to user code. + """ + return response + def pre_get_tag( self, request: tag.GetTagRequest, metadata: Sequence[Tuple[str, str]] ) -> Tuple[tag.GetTagRequest, Sequence[Tuple[str, str]]]: @@ -808,6 +1055,29 @@ def post_import_yum_artifacts( """ return response + def pre_list_attachments( + self, + request: attachment.ListAttachmentsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[attachment.ListAttachmentsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_attachments + + Override in a subclass to manipulate the request or metadata + before they are sent to the ArtifactRegistry server. + """ + return request, metadata + + def post_list_attachments( + self, response: attachment.ListAttachmentsResponse + ) -> attachment.ListAttachmentsResponse: + """Post-rpc interceptor for list_attachments + + Override in a subclass to manipulate the response + after it is returned by the ArtifactRegistry server but before + it is returned to user code. 
+ """ + return response + def pre_list_docker_images( self, request: artifact.ListDockerImagesRequest, @@ -965,6 +1235,27 @@ def post_list_repositories( """ return response + def pre_list_rules( + self, request: rule.ListRulesRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[rule.ListRulesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_rules + + Override in a subclass to manipulate the request or metadata + before they are sent to the ArtifactRegistry server. + """ + return request, metadata + + def post_list_rules( + self, response: rule.ListRulesResponse + ) -> rule.ListRulesResponse: + """Post-rpc interceptor for list_rules + + Override in a subclass to manipulate the response + after it is returned by the ArtifactRegistry server but before + it is returned to user code. + """ + return response + def pre_list_tags( self, request: tag.ListTagsRequest, metadata: Sequence[Tuple[str, str]] ) -> Tuple[tag.ListTagsRequest, Sequence[Tuple[str, str]]]: @@ -1049,6 +1340,46 @@ def post_test_iam_permissions( """ return response + def pre_update_file( + self, request: gda_file.UpdateFileRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[gda_file.UpdateFileRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_file + + Override in a subclass to manipulate the request or metadata + before they are sent to the ArtifactRegistry server. + """ + return request, metadata + + def post_update_file(self, response: gda_file.File) -> gda_file.File: + """Post-rpc interceptor for update_file + + Override in a subclass to manipulate the response + after it is returned by the ArtifactRegistry server but before + it is returned to user code. 
+ """ + return response + + def pre_update_package( + self, + request: gda_package.UpdatePackageRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[gda_package.UpdatePackageRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_package + + Override in a subclass to manipulate the request or metadata + before they are sent to the ArtifactRegistry server. + """ + return request, metadata + + def post_update_package(self, response: gda_package.Package) -> gda_package.Package: + """Post-rpc interceptor for update_package + + Override in a subclass to manipulate the response + after it is returned by the ArtifactRegistry server but before + it is returned to user code. + """ + return response + def pre_update_project_settings( self, request: settings.UpdateProjectSettingsRequest, @@ -1095,6 +1426,25 @@ def post_update_repository( """ return response + def pre_update_rule( + self, request: gda_rule.UpdateRuleRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[gda_rule.UpdateRuleRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_rule + + Override in a subclass to manipulate the request or metadata + before they are sent to the ArtifactRegistry server. + """ + return request, metadata + + def post_update_rule(self, response: gda_rule.Rule) -> gda_rule.Rule: + """Post-rpc interceptor for update_rule + + Override in a subclass to manipulate the response + after it is returned by the ArtifactRegistry server but before + it is returned to user code. 
+ """ + return response + def pre_update_tag( self, request: gda_tag.UpdateTagRequest, metadata: Sequence[Tuple[str, str]] ) -> Tuple[gda_tag.UpdateTagRequest, Sequence[Tuple[str, str]]]: @@ -1114,6 +1464,27 @@ def post_update_tag(self, response: gda_tag.Tag) -> gda_tag.Tag: """ return response + def pre_update_version( + self, + request: gda_version.UpdateVersionRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[gda_version.UpdateVersionRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_version + + Override in a subclass to manipulate the request or metadata + before they are sent to the ArtifactRegistry server. + """ + return request, metadata + + def post_update_version(self, response: gda_version.Version) -> gda_version.Version: + """Post-rpc interceptor for update_version + + Override in a subclass to manipulate the response + after it is returned by the ArtifactRegistry server but before + it is returned to user code. + """ + return response + def pre_update_vpcsc_config( self, request: gda_vpcsc_config.UpdateVPCSCConfigRequest, @@ -1439,7 +1810,106 @@ def __call__( resp = self._interceptor.post_batch_delete_versions(resp) return resp - class _CreateRepository( + class _CreateAttachment( + _BaseArtifactRegistryRestTransport._BaseCreateAttachment, + ArtifactRegistryRestStub, + ): + def __hash__(self): + return hash("ArtifactRegistryRestTransport.CreateAttachment") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: 
gda_attachment.CreateAttachmentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the create attachment method over HTTP. + + Args: + request (~.gda_attachment.CreateAttachmentRequest): + The request object. The request to create a new + attachment. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = ( + _BaseArtifactRegistryRestTransport._BaseCreateAttachment._get_http_options() + ) + request, metadata = self._interceptor.pre_create_attachment( + request, metadata + ) + transcoded_request = _BaseArtifactRegistryRestTransport._BaseCreateAttachment._get_transcoded_request( + http_options, request + ) + + body = _BaseArtifactRegistryRestTransport._BaseCreateAttachment._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseArtifactRegistryRestTransport._BaseCreateAttachment._get_query_params_json( + transcoded_request + ) + + # Send the request + response = ArtifactRegistryRestTransport._CreateAttachment._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_attachment(resp) + return resp + + class _CreateRepository( _BaseArtifactRegistryRestTransport._BaseCreateRepository, ArtifactRegistryRestStub, ): @@ -1538,6 +2008,106 @@ def __call__( resp = self._interceptor.post_create_repository(resp) return resp + class _CreateRule( + _BaseArtifactRegistryRestTransport._BaseCreateRule, ArtifactRegistryRestStub + ): + def __hash__(self): + return hash("ArtifactRegistryRestTransport.CreateRule") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: gda_rule.CreateRuleRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gda_rule.Rule: + r"""Call the create rule method over HTTP. + + Args: + request (~.gda_rule.CreateRuleRequest): + The request object. The request to create a new rule. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.gda_rule.Rule: + A rule defines the deny or allow + action of the operation it applies to + and the conditions required for the rule + to apply. 
You can set one rule for an + entire repository and one rule for each + package within. + + """ + + http_options = ( + _BaseArtifactRegistryRestTransport._BaseCreateRule._get_http_options() + ) + request, metadata = self._interceptor.pre_create_rule(request, metadata) + transcoded_request = _BaseArtifactRegistryRestTransport._BaseCreateRule._get_transcoded_request( + http_options, request + ) + + body = _BaseArtifactRegistryRestTransport._BaseCreateRule._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseArtifactRegistryRestTransport._BaseCreateRule._get_query_params_json( + transcoded_request + ) + + # Send the request + response = ArtifactRegistryRestTransport._CreateRule._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gda_rule.Rule() + pb_resp = gda_rule.Rule.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_rule(resp) + return resp + class _CreateTag( _BaseArtifactRegistryRestTransport._BaseCreateTag, ArtifactRegistryRestStub ): @@ -1635,6 +2205,187 @@ def __call__( resp = self._interceptor.post_create_tag(resp) return resp + class _DeleteAttachment( + _BaseArtifactRegistryRestTransport._BaseDeleteAttachment, + ArtifactRegistryRestStub, + ): + def __hash__(self): + return hash("ArtifactRegistryRestTransport.DeleteAttachment") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + 
"{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: attachment.DeleteAttachmentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete attachment method over HTTP. + + Args: + request (~.attachment.DeleteAttachmentRequest): + The request object. The request to delete an attachment. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = ( + _BaseArtifactRegistryRestTransport._BaseDeleteAttachment._get_http_options() + ) + request, metadata = self._interceptor.pre_delete_attachment( + request, metadata + ) + transcoded_request = _BaseArtifactRegistryRestTransport._BaseDeleteAttachment._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseArtifactRegistryRestTransport._BaseDeleteAttachment._get_query_params_json( + transcoded_request + ) + + # Send the request + response = ArtifactRegistryRestTransport._DeleteAttachment._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_attachment(resp) + return resp + + class _DeleteFile( + _BaseArtifactRegistryRestTransport._BaseDeleteFile, ArtifactRegistryRestStub + ): + def __hash__(self): + return hash("ArtifactRegistryRestTransport.DeleteFile") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: file.DeleteFileRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete file method over HTTP. + + Args: + request (~.file.DeleteFileRequest): + The request object. The request to delete a file. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options = ( + _BaseArtifactRegistryRestTransport._BaseDeleteFile._get_http_options() + ) + request, metadata = self._interceptor.pre_delete_file(request, metadata) + transcoded_request = _BaseArtifactRegistryRestTransport._BaseDeleteFile._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseArtifactRegistryRestTransport._BaseDeleteFile._get_query_params_json( + transcoded_request + ) + + # Send the request + response = ArtifactRegistryRestTransport._DeleteFile._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_file(resp) + return resp + class _DeletePackage( _BaseArtifactRegistryRestTransport._BaseDeletePackage, ArtifactRegistryRestStub ): @@ -1704,7 +2455,99 @@ def __call__( ) # Send the request - response = ArtifactRegistryRestTransport._DeletePackage._get_response( + response = ArtifactRegistryRestTransport._DeletePackage._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_package(resp) + return resp + + class _DeleteRepository( + _BaseArtifactRegistryRestTransport._BaseDeleteRepository, + ArtifactRegistryRestStub, + ): + def __hash__(self): + return hash("ArtifactRegistryRestTransport.DeleteRepository") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: repository.DeleteRepositoryRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete repository method over HTTP. + + Args: + request (~.repository.DeleteRepositoryRequest): + The request object. The request to delete a repository. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options = ( + _BaseArtifactRegistryRestTransport._BaseDeleteRepository._get_http_options() + ) + request, metadata = self._interceptor.pre_delete_repository( + request, metadata + ) + transcoded_request = _BaseArtifactRegistryRestTransport._BaseDeleteRepository._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseArtifactRegistryRestTransport._BaseDeleteRepository._get_query_params_json( + transcoded_request + ) + + # Send the request + response = ArtifactRegistryRestTransport._DeleteRepository._get_response( self._host, metadata, query_params, @@ -1721,15 +2564,14 @@ def __call__( # Return the response resp = operations_pb2.Operation() json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_delete_package(resp) + resp = self._interceptor.post_delete_repository(resp) return resp - class _DeleteRepository( - _BaseArtifactRegistryRestTransport._BaseDeleteRepository, - ArtifactRegistryRestStub, + class _DeleteRule( + _BaseArtifactRegistryRestTransport._BaseDeleteRule, ArtifactRegistryRestStub ): def __hash__(self): - return hash("ArtifactRegistryRestTransport.DeleteRepository") + return hash("ArtifactRegistryRestTransport.DeleteRule") @staticmethod def _get_response( @@ -1755,48 +2597,39 @@ def _get_response( def __call__( self, - request: repository.DeleteRepositoryRequest, + request: rule.DeleteRuleRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.Operation: - r"""Call the delete repository method over HTTP. + ): + r"""Call the delete rule method over HTTP. Args: - request (~.repository.DeleteRepositoryRequest): - The request object. The request to delete a repository. + request (~.rule.DeleteRuleRequest): + The request object. The request to delete a rule. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. 
timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. - """ http_options = ( - _BaseArtifactRegistryRestTransport._BaseDeleteRepository._get_http_options() - ) - request, metadata = self._interceptor.pre_delete_repository( - request, metadata + _BaseArtifactRegistryRestTransport._BaseDeleteRule._get_http_options() ) - transcoded_request = _BaseArtifactRegistryRestTransport._BaseDeleteRepository._get_transcoded_request( + request, metadata = self._interceptor.pre_delete_rule(request, metadata) + transcoded_request = _BaseArtifactRegistryRestTransport._BaseDeleteRule._get_transcoded_request( http_options, request ) # Jsonify the query params - query_params = _BaseArtifactRegistryRestTransport._BaseDeleteRepository._get_query_params_json( + query_params = _BaseArtifactRegistryRestTransport._BaseDeleteRule._get_query_params_json( transcoded_request ) # Send the request - response = ArtifactRegistryRestTransport._DeleteRepository._get_response( + response = ArtifactRegistryRestTransport._DeleteRule._get_response( self._host, metadata, query_params, @@ -1810,12 +2643,6 @@ def __call__( if response.status_code >= 400: raise core_exceptions.from_http_response(response) - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_delete_repository(resp) - return resp - class _DeleteTag( _BaseArtifactRegistryRestTransport._BaseDeleteTag, ArtifactRegistryRestStub ): @@ -1981,6 +2808,100 @@ def __call__( resp = self._interceptor.post_delete_version(resp) return resp + class _GetAttachment( + _BaseArtifactRegistryRestTransport._BaseGetAttachment, ArtifactRegistryRestStub + ): + def __hash__(self): + return 
hash("ArtifactRegistryRestTransport.GetAttachment") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: attachment.GetAttachmentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> attachment.Attachment: + r"""Call the get attachment method over HTTP. + + Args: + request (~.attachment.GetAttachmentRequest): + The request object. The request to retrieve an + attachment. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.attachment.Attachment: + An Attachment refers to additional + metadata that can be attached to + artifacts in Artifact Registry. An + attachment consists of one or more + files. 
+ + """ + + http_options = ( + _BaseArtifactRegistryRestTransport._BaseGetAttachment._get_http_options() + ) + request, metadata = self._interceptor.pre_get_attachment(request, metadata) + transcoded_request = _BaseArtifactRegistryRestTransport._BaseGetAttachment._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseArtifactRegistryRestTransport._BaseGetAttachment._get_query_params_json( + transcoded_request + ) + + # Send the request + response = ArtifactRegistryRestTransport._GetAttachment._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = attachment.Attachment() + pb_resp = attachment.Attachment.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_attachment(resp) + return resp + class _GetDockerImage( _BaseArtifactRegistryRestTransport._BaseGetDockerImage, ArtifactRegistryRestStub ): @@ -2887,6 +3808,104 @@ def __call__( resp = self._interceptor.post_get_repository(resp) return resp + class _GetRule( + _BaseArtifactRegistryRestTransport._BaseGetRule, ArtifactRegistryRestStub + ): + def __hash__(self): + return hash("ArtifactRegistryRestTransport.GetRule") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def 
__call__( + self, + request: rule.GetRuleRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> rule.Rule: + r"""Call the get rule method over HTTP. + + Args: + request (~.rule.GetRuleRequest): + The request object. The request to retrieve a rule. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.rule.Rule: + A rule defines the deny or allow + action of the operation it applies to + and the conditions required for the rule + to apply. You can set one rule for an + entire repository and one rule for each + package within. + + """ + + http_options = ( + _BaseArtifactRegistryRestTransport._BaseGetRule._get_http_options() + ) + request, metadata = self._interceptor.pre_get_rule(request, metadata) + transcoded_request = ( + _BaseArtifactRegistryRestTransport._BaseGetRule._get_transcoded_request( + http_options, request + ) + ) + + # Jsonify the query params + query_params = ( + _BaseArtifactRegistryRestTransport._BaseGetRule._get_query_params_json( + transcoded_request + ) + ) + + # Send the request + response = ArtifactRegistryRestTransport._GetRule._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = rule.Rule() + pb_resp = rule.Rule.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_rule(resp) + return resp + class _GetTag( _BaseArtifactRegistryRestTransport._BaseGetTag, ArtifactRegistryRestStub ): @@ -3360,9 +4379,102 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_import_yum_artifacts(resp) + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_import_yum_artifacts(resp) + return resp + + class _ListAttachments( + _BaseArtifactRegistryRestTransport._BaseListAttachments, + ArtifactRegistryRestStub, + ): + def __hash__(self): + return hash("ArtifactRegistryRestTransport.ListAttachments") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: attachment.ListAttachmentsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> attachment.ListAttachmentsResponse: + r"""Call the list attachments method over HTTP. + + Args: + request (~.attachment.ListAttachmentsRequest): + The request object. The request to list attachments. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.attachment.ListAttachmentsResponse: + The response from listing + attachments. + + """ + + http_options = ( + _BaseArtifactRegistryRestTransport._BaseListAttachments._get_http_options() + ) + request, metadata = self._interceptor.pre_list_attachments( + request, metadata + ) + transcoded_request = _BaseArtifactRegistryRestTransport._BaseListAttachments._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseArtifactRegistryRestTransport._BaseListAttachments._get_query_params_json( + transcoded_request + ) + + # Send the request + response = ArtifactRegistryRestTransport._ListAttachments._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = attachment.ListAttachmentsResponse() + pb_resp = attachment.ListAttachmentsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_attachments(resp) return resp class _ListDockerImages( @@ -4006,6 +5118,94 @@ def __call__( resp = self._interceptor.post_list_repositories(resp) return resp + class _ListRules( + _BaseArtifactRegistryRestTransport._BaseListRules, ArtifactRegistryRestStub + ): + def __hash__(self): + return hash("ArtifactRegistryRestTransport.ListRules") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: rule.ListRulesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> rule.ListRulesResponse: + r"""Call the list rules method over HTTP. + + Args: + request (~.rule.ListRulesRequest): + The request object. The request to list rules. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.rule.ListRulesResponse: + The response from listing rules. 
+ """ + + http_options = ( + _BaseArtifactRegistryRestTransport._BaseListRules._get_http_options() + ) + request, metadata = self._interceptor.pre_list_rules(request, metadata) + transcoded_request = _BaseArtifactRegistryRestTransport._BaseListRules._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseArtifactRegistryRestTransport._BaseListRules._get_query_params_json( + transcoded_request + ) + + # Send the request + response = ArtifactRegistryRestTransport._ListRules._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = rule.ListRulesResponse() + pb_resp = rule.ListRulesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_rules(resp) + return resp + class _ListTags( _BaseArtifactRegistryRestTransport._BaseListTags, ArtifactRegistryRestStub ): @@ -4313,24 +5513,218 @@ def __call__( """ http_options = ( - _BaseArtifactRegistryRestTransport._BaseSetIamPolicy._get_http_options() + _BaseArtifactRegistryRestTransport._BaseSetIamPolicy._get_http_options() + ) + request, metadata = self._interceptor.pre_set_iam_policy(request, metadata) + transcoded_request = _BaseArtifactRegistryRestTransport._BaseSetIamPolicy._get_transcoded_request( + http_options, request + ) + + body = _BaseArtifactRegistryRestTransport._BaseSetIamPolicy._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseArtifactRegistryRestTransport._BaseSetIamPolicy._get_query_params_json( + transcoded_request + ) + + # Send the request + response = ArtifactRegistryRestTransport._SetIamPolicy._get_response( + self._host, + metadata, + query_params, + 
self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = policy_pb2.Policy() + pb_resp = resp + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_set_iam_policy(resp) + return resp + + class _TestIamPermissions( + _BaseArtifactRegistryRestTransport._BaseTestIamPermissions, + ArtifactRegistryRestStub, + ): + def __hash__(self): + return hash("ArtifactRegistryRestTransport.TestIamPermissions") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: iam_policy_pb2.TestIamPermissionsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Call the test iam permissions method over HTTP. + + Args: + request (~.iam_policy_pb2.TestIamPermissionsRequest): + The request object. Request message for ``TestIamPermissions`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.iam_policy_pb2.TestIamPermissionsResponse: + Response message for ``TestIamPermissions`` method. + """ + + http_options = ( + _BaseArtifactRegistryRestTransport._BaseTestIamPermissions._get_http_options() + ) + request, metadata = self._interceptor.pre_test_iam_permissions( + request, metadata + ) + transcoded_request = _BaseArtifactRegistryRestTransport._BaseTestIamPermissions._get_transcoded_request( + http_options, request + ) + + body = _BaseArtifactRegistryRestTransport._BaseTestIamPermissions._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseArtifactRegistryRestTransport._BaseTestIamPermissions._get_query_params_json( + transcoded_request + ) + + # Send the request + response = ArtifactRegistryRestTransport._TestIamPermissions._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = iam_policy_pb2.TestIamPermissionsResponse() + pb_resp = resp + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_test_iam_permissions(resp) + return resp + + class _UpdateFile( + _BaseArtifactRegistryRestTransport._BaseUpdateFile, ArtifactRegistryRestStub + ): + def __hash__(self): + return hash("ArtifactRegistryRestTransport.UpdateFile") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: gda_file.UpdateFileRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gda_file.File: + r"""Call the update file method over HTTP. + + Args: + request (~.gda_file.UpdateFileRequest): + The request object. The request to update a file. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.gda_file.File: + Files store content that is + potentially associated with Packages or + Versions. 
+ + """ + + http_options = ( + _BaseArtifactRegistryRestTransport._BaseUpdateFile._get_http_options() ) - request, metadata = self._interceptor.pre_set_iam_policy(request, metadata) - transcoded_request = _BaseArtifactRegistryRestTransport._BaseSetIamPolicy._get_transcoded_request( + request, metadata = self._interceptor.pre_update_file(request, metadata) + transcoded_request = _BaseArtifactRegistryRestTransport._BaseUpdateFile._get_transcoded_request( http_options, request ) - body = _BaseArtifactRegistryRestTransport._BaseSetIamPolicy._get_request_body_json( + body = _BaseArtifactRegistryRestTransport._BaseUpdateFile._get_request_body_json( transcoded_request ) # Jsonify the query params - query_params = _BaseArtifactRegistryRestTransport._BaseSetIamPolicy._get_query_params_json( + query_params = _BaseArtifactRegistryRestTransport._BaseUpdateFile._get_query_params_json( transcoded_request ) # Send the request - response = ArtifactRegistryRestTransport._SetIamPolicy._get_response( + response = ArtifactRegistryRestTransport._UpdateFile._get_response( self._host, metadata, query_params, @@ -4346,19 +5740,18 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = policy_pb2.Policy() - pb_resp = resp + resp = gda_file.File() + pb_resp = gda_file.File.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_set_iam_policy(resp) + resp = self._interceptor.post_update_file(resp) return resp - class _TestIamPermissions( - _BaseArtifactRegistryRestTransport._BaseTestIamPermissions, - ArtifactRegistryRestStub, + class _UpdatePackage( + _BaseArtifactRegistryRestTransport._BaseUpdatePackage, ArtifactRegistryRestStub ): def __hash__(self): - return hash("ArtifactRegistryRestTransport.TestIamPermissions") + return hash("ArtifactRegistryRestTransport.UpdatePackage") @staticmethod def _get_response( @@ -4385,17 +5778,17 @@ def _get_response( def __call__( self, - request: 
iam_policy_pb2.TestIamPermissionsRequest, + request: gda_package.UpdatePackageRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. + ) -> gda_package.Package: + r"""Call the update package method over HTTP. Args: - request (~.iam_policy_pb2.TestIamPermissionsRequest): - The request object. Request message for ``TestIamPermissions`` method. + request (~.gda_package.UpdatePackageRequest): + The request object. The request to update a package. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -4403,31 +5796,31 @@ def __call__( sent along with the request as metadata. Returns: - ~.iam_policy_pb2.TestIamPermissionsResponse: - Response message for ``TestIamPermissions`` method. + ~.gda_package.Package: + Packages are named collections of + versions. 
+ """ http_options = ( - _BaseArtifactRegistryRestTransport._BaseTestIamPermissions._get_http_options() + _BaseArtifactRegistryRestTransport._BaseUpdatePackage._get_http_options() ) - request, metadata = self._interceptor.pre_test_iam_permissions( - request, metadata - ) - transcoded_request = _BaseArtifactRegistryRestTransport._BaseTestIamPermissions._get_transcoded_request( + request, metadata = self._interceptor.pre_update_package(request, metadata) + transcoded_request = _BaseArtifactRegistryRestTransport._BaseUpdatePackage._get_transcoded_request( http_options, request ) - body = _BaseArtifactRegistryRestTransport._BaseTestIamPermissions._get_request_body_json( + body = _BaseArtifactRegistryRestTransport._BaseUpdatePackage._get_request_body_json( transcoded_request ) # Jsonify the query params - query_params = _BaseArtifactRegistryRestTransport._BaseTestIamPermissions._get_query_params_json( + query_params = _BaseArtifactRegistryRestTransport._BaseUpdatePackage._get_query_params_json( transcoded_request ) # Send the request - response = ArtifactRegistryRestTransport._TestIamPermissions._get_response( + response = ArtifactRegistryRestTransport._UpdatePackage._get_response( self._host, metadata, query_params, @@ -4443,11 +5836,11 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = iam_policy_pb2.TestIamPermissionsResponse() - pb_resp = resp + resp = gda_package.Package() + pb_resp = gda_package.Package.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_test_iam_permissions(resp) + resp = self._interceptor.post_update_package(resp) return resp class _UpdateProjectSettings( @@ -4650,6 +6043,106 @@ def __call__( resp = self._interceptor.post_update_repository(resp) return resp + class _UpdateRule( + _BaseArtifactRegistryRestTransport._BaseUpdateRule, ArtifactRegistryRestStub + ): + def __hash__(self): + return 
hash("ArtifactRegistryRestTransport.UpdateRule") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: gda_rule.UpdateRuleRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gda_rule.Rule: + r"""Call the update rule method over HTTP. + + Args: + request (~.gda_rule.UpdateRuleRequest): + The request object. The request to update a rule. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.gda_rule.Rule: + A rule defines the deny or allow + action of the operation it applies to + and the conditions required for the rule + to apply. You can set one rule for an + entire repository and one rule for each + package within. 
+ + """ + + http_options = ( + _BaseArtifactRegistryRestTransport._BaseUpdateRule._get_http_options() + ) + request, metadata = self._interceptor.pre_update_rule(request, metadata) + transcoded_request = _BaseArtifactRegistryRestTransport._BaseUpdateRule._get_transcoded_request( + http_options, request + ) + + body = _BaseArtifactRegistryRestTransport._BaseUpdateRule._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseArtifactRegistryRestTransport._BaseUpdateRule._get_query_params_json( + transcoded_request + ) + + # Send the request + response = ArtifactRegistryRestTransport._UpdateRule._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gda_rule.Rule() + pb_resp = gda_rule.Rule.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_rule(resp) + return resp + class _UpdateTag( _BaseArtifactRegistryRestTransport._BaseUpdateTag, ArtifactRegistryRestStub ): @@ -4748,6 +6241,105 @@ def __call__( resp = self._interceptor.post_update_tag(resp) return resp + class _UpdateVersion( + _BaseArtifactRegistryRestTransport._BaseUpdateVersion, ArtifactRegistryRestStub + ): + def __hash__(self): + return hash("ArtifactRegistryRestTransport.UpdateVersion") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + 
params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: gda_version.UpdateVersionRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gda_version.Version: + r"""Call the update version method over HTTP. + + Args: + request (~.gda_version.UpdateVersionRequest): + The request object. The request to update a version. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.gda_version.Version: + The body of a version resource. A + version resource represents a collection + of components, such as files and other + data. This may correspond to a version + in many package management schemes. + + """ + + http_options = ( + _BaseArtifactRegistryRestTransport._BaseUpdateVersion._get_http_options() + ) + request, metadata = self._interceptor.pre_update_version(request, metadata) + transcoded_request = _BaseArtifactRegistryRestTransport._BaseUpdateVersion._get_transcoded_request( + http_options, request + ) + + body = _BaseArtifactRegistryRestTransport._BaseUpdateVersion._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseArtifactRegistryRestTransport._BaseUpdateVersion._get_query_params_json( + transcoded_request + ) + + # Send the request + response = ArtifactRegistryRestTransport._UpdateVersion._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gda_version.Version() + pb_resp = gda_version.Version.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_version(resp) + return resp + class _UpdateVPCSCConfig( _BaseArtifactRegistryRestTransport._BaseUpdateVPCSCConfig, ArtifactRegistryRestStub, @@ -4855,6 +6447,14 @@ def batch_delete_versions( # In C++ this would require a dynamic_cast return self._BatchDeleteVersions(self._session, self._host, self._interceptor) # type: ignore + @property + def create_attachment( + self, + ) -> Callable[[gda_attachment.CreateAttachmentRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateAttachment(self._session, self._host, self._interceptor) # type: ignore + @property def create_repository( self, @@ -4863,12 +6463,34 @@ def create_repository( # In C++ this would require a dynamic_cast return self._CreateRepository(self._session, self._host, self._interceptor) # type: ignore + @property + def create_rule(self) -> Callable[[gda_rule.CreateRuleRequest], gda_rule.Rule]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateRule(self._session, self._host, self._interceptor) # type: ignore + @property def create_tag(self) -> Callable[[gda_tag.CreateTagRequest], gda_tag.Tag]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
# In C++ this would require a dynamic_cast return self._CreateTag(self._session, self._host, self._interceptor) # type: ignore + @property + def delete_attachment( + self, + ) -> Callable[[attachment.DeleteAttachmentRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteAttachment(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_file( + self, + ) -> Callable[[file.DeleteFileRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteFile(self._session, self._host, self._interceptor) # type: ignore + @property def delete_package( self, @@ -4885,6 +6507,12 @@ def delete_repository( # In C++ this would require a dynamic_cast return self._DeleteRepository(self._session, self._host, self._interceptor) # type: ignore + @property + def delete_rule(self) -> Callable[[rule.DeleteRuleRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteRule(self._session, self._host, self._interceptor) # type: ignore + @property def delete_tag(self) -> Callable[[tag.DeleteTagRequest], empty_pb2.Empty]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. @@ -4899,6 +6527,14 @@ def delete_version( # In C++ this would require a dynamic_cast return self._DeleteVersion(self._session, self._host, self._interceptor) # type: ignore + @property + def get_attachment( + self, + ) -> Callable[[attachment.GetAttachmentRequest], attachment.Attachment]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._GetAttachment(self._session, self._host, self._interceptor) # type: ignore + @property def get_docker_image( self, @@ -4967,6 +6603,12 @@ def get_repository( # In C++ this would require a dynamic_cast return self._GetRepository(self._session, self._host, self._interceptor) # type: ignore + @property + def get_rule(self) -> Callable[[rule.GetRuleRequest], rule.Rule]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetRule(self._session, self._host, self._interceptor) # type: ignore + @property def get_tag(self) -> Callable[[tag.GetTagRequest], tag.Tag]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. @@ -5003,6 +6645,16 @@ def import_yum_artifacts( # In C++ this would require a dynamic_cast return self._ImportYumArtifacts(self._session, self._host, self._interceptor) # type: ignore + @property + def list_attachments( + self, + ) -> Callable[ + [attachment.ListAttachmentsRequest], attachment.ListAttachmentsResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListAttachments(self._session, self._host, self._interceptor) # type: ignore + @property def list_docker_images( self, @@ -5065,6 +6717,12 @@ def list_repositories( # In C++ this would require a dynamic_cast return self._ListRepositories(self._session, self._host, self._interceptor) # type: ignore + @property + def list_rules(self) -> Callable[[rule.ListRulesRequest], rule.ListRulesResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ListRules(self._session, self._host, self._interceptor) # type: ignore + @property def list_tags(self) -> Callable[[tag.ListTagsRequest], tag.ListTagsResponse]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. @@ -5098,6 +6756,20 @@ def test_iam_permissions( # In C++ this would require a dynamic_cast return self._TestIamPermissions(self._session, self._host, self._interceptor) # type: ignore + @property + def update_file(self) -> Callable[[gda_file.UpdateFileRequest], gda_file.File]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateFile(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_package( + self, + ) -> Callable[[gda_package.UpdatePackageRequest], gda_package.Package]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdatePackage(self._session, self._host, self._interceptor) # type: ignore + @property def update_project_settings( self, @@ -5114,12 +6786,26 @@ def update_repository( # In C++ this would require a dynamic_cast return self._UpdateRepository(self._session, self._host, self._interceptor) # type: ignore + @property + def update_rule(self) -> Callable[[gda_rule.UpdateRuleRequest], gda_rule.Rule]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateRule(self._session, self._host, self._interceptor) # type: ignore + @property def update_tag(self) -> Callable[[gda_tag.UpdateTagRequest], gda_tag.Tag]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
# In C++ this would require a dynamic_cast return self._UpdateTag(self._session, self._host, self._interceptor) # type: ignore + @property + def update_version( + self, + ) -> Callable[[gda_version.UpdateVersionRequest], gda_version.Version]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateVersion(self._session, self._host, self._interceptor) # type: ignore + @property def update_vpcsc_config( self, diff --git a/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/services/artifact_registry/transports/rest_base.py b/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/services/artifact_registry/transports/rest_base.py index 8361b950576f..d36109560ccc 100644 --- a/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/services/artifact_registry/transports/rest_base.py +++ b/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/services/artifact_registry/transports/rest_base.py @@ -25,14 +25,23 @@ from google.protobuf import empty_pb2 # type: ignore from google.protobuf import json_format -from google.cloud.artifactregistry_v1.types import apt_artifact, artifact, file, package from google.cloud.artifactregistry_v1.types import vpcsc_config as gda_vpcsc_config +from google.cloud.artifactregistry_v1.types import apt_artifact, artifact +from google.cloud.artifactregistry_v1.types import attachment +from google.cloud.artifactregistry_v1.types import attachment as gda_attachment +from google.cloud.artifactregistry_v1.types import file +from google.cloud.artifactregistry_v1.types import file as gda_file +from google.cloud.artifactregistry_v1.types import package +from google.cloud.artifactregistry_v1.types import package as gda_package from google.cloud.artifactregistry_v1.types import repository from google.cloud.artifactregistry_v1.types import repository as gda_repository +from 
google.cloud.artifactregistry_v1.types import rule +from google.cloud.artifactregistry_v1.types import rule as gda_rule from google.cloud.artifactregistry_v1.types import settings from google.cloud.artifactregistry_v1.types import tag from google.cloud.artifactregistry_v1.types import tag as gda_tag from google.cloud.artifactregistry_v1.types import version +from google.cloud.artifactregistry_v1.types import version as gda_version from google.cloud.artifactregistry_v1.types import vpcsc_config from google.cloud.artifactregistry_v1.types import yum_artifact @@ -158,6 +167,65 @@ def _get_query_params_json(transcoded_request): query_params["$alt"] = "json;enum-encoding=int" return query_params + class _BaseCreateAttachment: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "attachmentId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*/repositories/*}/attachments", + "body": "attachment", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = gda_attachment.CreateAttachmentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + 
query_params.update( + _BaseArtifactRegistryRestTransport._BaseCreateAttachment._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + class _BaseCreateRepository: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") @@ -217,6 +285,63 @@ def _get_query_params_json(transcoded_request): query_params["$alt"] = "json;enum-encoding=int" return query_params + class _BaseCreateRule: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*/repositories/*}/rules", + "body": "rule", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = gda_rule.CreateRuleRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseArtifactRegistryRestTransport._BaseCreateRule._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + class _BaseCreateTag: def __hash__(self): # pragma: NO COVER return 
NotImplementedError("__hash__ must be implemented.") @@ -259,6 +384,100 @@ def _get_query_params_json(transcoded_request): query_params["$alt"] = "json;enum-encoding=int" return query_params + class _BaseDeleteAttachment: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/repositories/*/attachments/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = attachment.DeleteAttachmentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseArtifactRegistryRestTransport._BaseDeleteAttachment._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseDeleteFile: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/repositories/*/files/*}", + }, + ] + return 
http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = file.DeleteFileRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseArtifactRegistryRestTransport._BaseDeleteFile._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + class _BaseDeletePackage: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") @@ -353,6 +572,53 @@ def _get_query_params_json(transcoded_request): query_params["$alt"] = "json;enum-encoding=int" return query_params + class _BaseDeleteRule: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/repositories/*/rules/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = rule.DeleteRuleRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + 
_BaseArtifactRegistryRestTransport._BaseDeleteRule._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + class _BaseDeleteTag: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") @@ -417,6 +683,53 @@ def _get_query_params_json(transcoded_request): query_params["$alt"] = "json;enum-encoding=int" return query_params + class _BaseGetAttachment: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/repositories/*/attachments/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = attachment.GetAttachmentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseArtifactRegistryRestTransport._BaseGetAttachment._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + class _BaseGetDockerImage: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") @@ -840,6 +1153,53 @@ def _get_query_params_json(transcoded_request): query_params["$alt"] = "json;enum-encoding=int" return query_params + class _BaseGetRule: + def __hash__(self): # pragma: NO COVER + return 
NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/repositories/*/rules/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = rule.GetRuleRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseArtifactRegistryRestTransport._BaseGetRule._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + class _BaseGetTag: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") @@ -968,7 +1328,49 @@ def _get_http_options(): @staticmethod def _get_transcoded_request(http_options, request): - pb_request = apt_artifact.ImportAptArtifactsRequest.pb(request) + pb_request = apt_artifact.ImportAptArtifactsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) 
+ + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseImportYumArtifacts: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*/repositories/*}/yumArtifacts:import", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = yum_artifact.ImportYumArtifactsRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) return transcoded_request @@ -993,36 +1395,36 @@ def _get_query_params_json(transcoded_request): query_params["$alt"] = "json;enum-encoding=int" return query_params - class _BaseImportYumArtifacts: + class _BaseListAttachments: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + @staticmethod def _get_http_options(): http_options: List[Dict[str, str]] = [ { - "method": "post", - "uri": "/v1/{parent=projects/*/locations/*/repositories/*}/yumArtifacts:import", - "body": "*", + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*/repositories/*}/attachments", }, ] return http_options @staticmethod def _get_transcoded_request(http_options, request): - pb_request = yum_artifact.ImportYumArtifactsRequest.pb(request) + pb_request = attachment.ListAttachmentsRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) return transcoded_request - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request["body"], 
use_integers_for_enums=True - ) - return body - @staticmethod def _get_query_params_json(transcoded_request): query_params = json.loads( @@ -1031,6 +1433,11 @@ def _get_query_params_json(transcoded_request): use_integers_for_enums=True, ) ) + query_params.update( + _BaseArtifactRegistryRestTransport._BaseListAttachments._get_unset_required_fields( + query_params + ) + ) query_params["$alt"] = "json;enum-encoding=int" return query_params @@ -1364,6 +1771,53 @@ def _get_query_params_json(transcoded_request): query_params["$alt"] = "json;enum-encoding=int" return query_params + class _BaseListRules: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*/repositories/*}/rules", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = rule.ListRulesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseArtifactRegistryRestTransport._BaseListRules._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + class _BaseListTags: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") @@ -1542,6 +1996,107 @@ def _get_query_params_json(transcoded_request): query_params["$alt"] = 
"json;enum-encoding=int" return query_params + class _BaseUpdateFile: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask": {}, + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/{file.name=projects/*/locations/*/repositories/*/files/*}", + "body": "file", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = gda_file.UpdateFileRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseArtifactRegistryRestTransport._BaseUpdateFile._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseUpdatePackage: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/{package.name=projects/*/locations/*/repositories/*/packages/*}", + "body": "package", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = 
gda_package.UpdatePackageRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + class _BaseUpdateProjectSettings: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") @@ -1626,6 +2181,48 @@ def _get_query_params_json(transcoded_request): query_params["$alt"] = "json;enum-encoding=int" return query_params + class _BaseUpdateRule: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/{rule.name=projects/*/locations/*/repositories/*/rules/*}", + "body": "rule", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = gda_rule.UpdateRuleRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + class 
_BaseUpdateTag: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") @@ -1668,6 +2265,63 @@ def _get_query_params_json(transcoded_request): query_params["$alt"] = "json;enum-encoding=int" return query_params + class _BaseUpdateVersion: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/{version.name=projects/*/locations/*/repositories/*/packages/*/versions/*}", + "body": "version", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = gda_version.UpdateVersionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseArtifactRegistryRestTransport._BaseUpdateVersion._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + class _BaseUpdateVPCSCConfig: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") diff --git a/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/types/__init__.py 
b/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/types/__init__.py index 03d8f5baa554..9d71c93dac06 100644 --- a/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/types/__init__.py +++ b/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/types/__init__.py @@ -39,13 +39,31 @@ NpmPackage, PythonPackage, ) -from .file import File, GetFileRequest, Hash, ListFilesRequest, ListFilesResponse +from .attachment import ( + Attachment, + CreateAttachmentRequest, + DeleteAttachmentRequest, + GetAttachmentRequest, + ListAttachmentsRequest, + ListAttachmentsResponse, +) +from .file import ( + DeleteFileRequest, + File, + GetFileRequest, + Hash, + ListFilesRequest, + ListFilesResponse, + UpdateFileRequest, +) +from .generic import GenericArtifact from .package import ( DeletePackageRequest, GetPackageRequest, ListPackagesRequest, ListPackagesResponse, Package, + UpdatePackageRequest, ) from .repository import ( CleanupPolicy, @@ -62,6 +80,15 @@ UpstreamPolicy, VirtualRepositoryConfig, ) +from .rule import ( + CreateRuleRequest, + DeleteRuleRequest, + GetRuleRequest, + ListRulesRequest, + ListRulesResponse, + Rule, + UpdateRuleRequest, +) from .service import OperationMetadata from .settings import ( GetProjectSettingsRequest, @@ -84,6 +111,7 @@ GetVersionRequest, ListVersionsRequest, ListVersionsResponse, + UpdateVersionRequest, Version, VersionView, ) @@ -120,16 +148,26 @@ "MavenArtifact", "NpmPackage", "PythonPackage", + "Attachment", + "CreateAttachmentRequest", + "DeleteAttachmentRequest", + "GetAttachmentRequest", + "ListAttachmentsRequest", + "ListAttachmentsResponse", + "DeleteFileRequest", "File", "GetFileRequest", "Hash", "ListFilesRequest", "ListFilesResponse", + "UpdateFileRequest", + "GenericArtifact", "DeletePackageRequest", "GetPackageRequest", "ListPackagesRequest", "ListPackagesResponse", "Package", + "UpdatePackageRequest", "CleanupPolicy", "CleanupPolicyCondition", 
"CleanupPolicyMostRecentVersions", @@ -143,6 +181,13 @@ "UpdateRepositoryRequest", "UpstreamPolicy", "VirtualRepositoryConfig", + "CreateRuleRequest", + "DeleteRuleRequest", + "GetRuleRequest", + "ListRulesRequest", + "ListRulesResponse", + "Rule", + "UpdateRuleRequest", "OperationMetadata", "GetProjectSettingsRequest", "ProjectSettings", @@ -160,6 +205,7 @@ "GetVersionRequest", "ListVersionsRequest", "ListVersionsResponse", + "UpdateVersionRequest", "Version", "VersionView", "GetVPCSCConfigRequest", diff --git a/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/types/artifact.py b/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/types/artifact.py index 5b1950637fab..40db288339c7 100644 --- a/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/types/artifact.py +++ b/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/types/artifact.py @@ -143,6 +143,7 @@ class ListDockerImagesRequest(proto.Message): whose docker images will be listed. page_size (int): The maximum number of artifacts to return. + Maximum page size is 1,000. page_token (str): The next_page_token value returned from a previous list request, if any. @@ -282,6 +283,7 @@ class ListMavenArtifactsRequest(proto.Message): whose maven artifacts will be listed. page_size (int): The maximum number of artifacts to return. + Maximum page size is 1,000. page_token (str): The next_page_token value returned from a previous list request, if any. @@ -402,6 +404,7 @@ class ListNpmPackagesRequest(proto.Message): whose npm packages will be listed. page_size (int): The maximum number of artifacts to return. + Maximum page size is 1,000. page_token (str): The next_page_token value returned from a previous list request, if any. @@ -526,6 +529,7 @@ class ListPythonPackagesRequest(proto.Message): whose python packages will be listed. page_size (int): The maximum number of artifacts to return. + Maximum page size is 1,000. 
page_token (str): The next_page_token value returned from a previous list request, if any. diff --git a/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/types/attachment.py b/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/types/attachment.py new file mode 100644 index 000000000000..027d5f1b59a7 --- /dev/null +++ b/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/types/attachment.py @@ -0,0 +1,246 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.devtools.artifactregistry.v1", + manifest={ + "Attachment", + "ListAttachmentsRequest", + "ListAttachmentsResponse", + "GetAttachmentRequest", + "CreateAttachmentRequest", + "DeleteAttachmentRequest", + }, +) + + +class Attachment(proto.Message): + r"""An Attachment refers to additional metadata that can be + attached to artifacts in Artifact Registry. An attachment + consists of one or more files. + + Attributes: + name (str): + The name of the attachment. E.g. + ``projects/p1/locations/us/repositories/repo/attachments/sbom``. + target (str): + Required. The target the attachment is for, can be a + Version, Package or Repository. E.g. 
+ ``projects/p1/locations/us-central1/repositories/repo1/packages/p1/versions/v1``. + type_ (str): + Type of attachment. E.g. ``application/vnd.spdx+json`` + attachment_namespace (str): + The namespace this attachment belongs to. E.g. If an + attachment is created by artifact analysis, namespace is set + to ``artifactanalysis.googleapis.com``. + annotations (MutableMapping[str, str]): + Optional. User annotations. These attributes + can only be set and used by the user, and not by + Artifact Registry. See + https://google.aip.dev/128#annotations for more + details such as format and size limitations. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the attachment was + created. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the attachment was + last updated. + files (MutableSequence[str]): + Required. The files that belong to this attachment. If the + file ID part contains slashes, they are escaped. E.g. + ``projects/p1/locations/us-central1/repositories/repo1/files/sha:``. + oci_version_name (str): + Output only. The name of the OCI version that this + attachment created. Only populated for Docker attachments. + E.g. + ``projects/p1/locations/us-central1/repositories/repo1/packages/p1/versions/v1``. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + target: str = proto.Field( + proto.STRING, + number=2, + ) + type_: str = proto.Field( + proto.STRING, + number=3, + ) + attachment_namespace: str = proto.Field( + proto.STRING, + number=4, + ) + annotations: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=5, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=7, + message=timestamp_pb2.Timestamp, + ) + files: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=8, + ) + oci_version_name: str = proto.Field( + proto.STRING, + number=9, + ) + + +class ListAttachmentsRequest(proto.Message): + r"""The request to list attachments. + + Attributes: + parent (str): + Required. The name of the parent resource + whose attachments will be listed. + filter (str): + Optional. An expression for filtering the results of the + request. Filter rules are case insensitive. The fields + eligible for filtering are: + + - ``target`` + - ``type`` + - ``attachment_namespace`` + page_size (int): + The maximum number of attachments to return. + Maximum page size is 1,000. + page_token (str): + The next_page_token value returned from a previous list + request, if any. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + filter: str = proto.Field( + proto.STRING, + number=2, + ) + page_size: int = proto.Field( + proto.INT32, + number=3, + ) + page_token: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ListAttachmentsResponse(proto.Message): + r"""The response from listing attachments. + + Attributes: + attachments (MutableSequence[google.cloud.artifactregistry_v1.types.Attachment]): + The attachments returned. 
+ next_page_token (str): + The token to retrieve the next page of + attachments, or empty if there are no more + attachments to return. + """ + + @property + def raw_page(self): + return self + + attachments: MutableSequence["Attachment"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Attachment", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class GetAttachmentRequest(proto.Message): + r"""The request to retrieve an attachment. + + Attributes: + name (str): + Required. The name of the attachment to + retrieve. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateAttachmentRequest(proto.Message): + r"""The request to create a new attachment. + + Attributes: + parent (str): + Required. The name of the parent resource + where the attachment will be created. + attachment_id (str): + Required. The attachment id to use for this + attachment. + attachment (google.cloud.artifactregistry_v1.types.Attachment): + Required. The attachment to be created. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + attachment_id: str = proto.Field( + proto.STRING, + number=2, + ) + attachment: "Attachment" = proto.Field( + proto.MESSAGE, + number=3, + message="Attachment", + ) + + +class DeleteAttachmentRequest(proto.Message): + r"""The request to delete an attachment. + + Attributes: + name (str): + Required. The name of the attachment to + delete. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/types/file.py b/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/types/file.py index 25cdafe65255..684c98fe873e 100644 --- a/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/types/file.py +++ b/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/types/file.py @@ -17,6 +17,7 @@ from typing import MutableMapping, MutableSequence +from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore import proto # type: ignore @@ -28,6 +29,8 @@ "ListFilesRequest", "ListFilesResponse", "GetFileRequest", + "DeleteFileRequest", + "UpdateFileRequest", }, ) @@ -75,10 +78,8 @@ class File(proto.Message): Attributes: name (str): The name of the file, for example: - - "projects/p1/locations/us-central1/repositories/repo1/files/a%2Fb%2Fc.txt". - If the file ID part contains slashes, they are - escaped. + ``projects/p1/locations/us-central1/repositories/repo1/files/a%2Fb%2Fc.txt``. + If the file ID part contains slashes, they are escaped. size_bytes (int): The size of the File in bytes. hashes (MutableSequence[google.cloud.artifactregistry_v1.types.Hash]): @@ -96,6 +97,8 @@ class File(proto.Message): Output only. The time when the last attempt to refresh the file's data was made. Only set when the repository is remote. + annotations (MutableMapping[str, str]): + Optional. Client specified annotations. 
""" name: str = proto.Field( @@ -130,6 +133,11 @@ class File(proto.Message): number=8, message=timestamp_pb2.Timestamp, ) + annotations: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=9, + ) class ListFilesRequest(proto.Message): @@ -147,16 +155,59 @@ class ListFilesRequest(proto.Message): - ``name`` - ``owner`` + - ``annotations`` + + Examples of using a filter: + + To filter the results of your request to files with the name + ``my_file.txt`` in project ``my-project`` in the + ``us-central`` region, in repository ``my-repo``, append the + following filter expression to your request: + + - ``name="projects/my-project/locations/us-central1/repositories/my-repo/files/my-file.txt"`` + + You can also use wildcards to match any number of characters + before or after the value: + + - ``name="projects/my-project/locations/us-central1/repositories/my-repo/files/my-*"`` + - ``name="projects/my-project/locations/us-central1/repositories/my-repo/files/*file.txt"`` + - ``name="projects/my-project/locations/us-central1/repositories/my-repo/files/*file*"`` + + To filter the results of your request to files owned by the + version ``1.0`` in package ``pkg1``, append the following + filter expression to your request: + + - ``owner="projects/my-project/locations/us-central1/repositories/my-repo/packages/my-package/versions/1.0"`` - An example of using a filter: + To filter the results of your request to files with the + annotation key-value pair [``external_link``: + ``external_link_value``], append the following filter + expression to your request: - - ``name="projects/p1/locations/us-central1/repositories/repo1/files/a/b/*"`` - --> Files with an ID starting with "a/b/". - - ``owner="projects/p1/locations/us-central1/repositories/repo1/packages/pkg1/versions/1.0"`` - --> Files owned by the version ``1.0`` in package - ``pkg1``. 
+ - ``"annotations.external_link:external_link_value"`` + + To filter just for a specific annotation key + ``external_link``, append the following filter expression to + your request: + + - ``"annotations.external_link"`` + + If the annotation key or value contains special characters, + you can escape them by surrounding the value with backticks. + For example, to filter the results of your request to files + with the annotation key-value pair + [``external.link``:``https://example.com/my-file``], append + the following filter expression to your request: + + - :literal:`"annotations.`external.link`:`https://example.com/my-file`"` + + You can also filter with annotations with a wildcard to + match any number of characters before or after the value: + + - :literal:`"annotations.*_link:`*example.com*`"` page_size (int): The maximum number of files to return. + Maximum page size is 1,000. page_token (str): The next_page_token value returned from a previous list request, if any. @@ -226,4 +277,43 @@ class GetFileRequest(proto.Message): ) +class DeleteFileRequest(proto.Message): + r"""The request to delete a file. + + Attributes: + name (str): + Required. The name of the file to delete. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class UpdateFileRequest(proto.Message): + r"""The request to update a file. + + Attributes: + file (google.cloud.artifactregistry_v1.types.File): + Required. The File that replaces the resource + on the server. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. The update mask applies to the resource. 
For the + ``FieldMask`` definition, see + https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask + """ + + file: "File" = proto.Field( + proto.MESSAGE, + number=1, + message="File", + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/types/generic.py b/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/types/generic.py new file mode 100644 index 000000000000..f6a2134dcc6b --- /dev/null +++ b/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/types/generic.py @@ -0,0 +1,71 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.devtools.artifactregistry.v1", + manifest={ + "GenericArtifact", + }, +) + + +class GenericArtifact(proto.Message): + r"""GenericArtifact represents a generic artifact + + Attributes: + name (str): + Resource name of the generic artifact. project, location, + repository, package_id and version_id create a unique + generic artifact. i.e. 
+ "projects/test-project/locations/us-west4/repositories/test-repo/ + genericArtifacts/package_id:version_id". + version (str): + The version of the generic artifact. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the Generic module + is created. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the Generic module + is updated. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + version: str = proto.Field( + proto.STRING, + number=2, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/types/package.py b/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/types/package.py index fb57d551a255..0b2d1b2a11fa 100644 --- a/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/types/package.py +++ b/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/types/package.py @@ -17,6 +17,7 @@ from typing import MutableMapping, MutableSequence +from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore import proto # type: ignore @@ -28,6 +29,7 @@ "ListPackagesResponse", "GetPackageRequest", "DeletePackageRequest", + "UpdatePackageRequest", }, ) @@ -49,6 +51,8 @@ class Package(proto.Message): The time when the package was last updated. This includes publishing a new version of the package. + annotations (MutableMapping[str, str]): + Optional. Client specified annotations. 
""" name: str = proto.Field( @@ -69,6 +73,11 @@ class Package(proto.Message): number=6, message=timestamp_pb2.Timestamp, ) + annotations: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=7, + ) class ListPackagesRequest(proto.Message): @@ -84,6 +93,58 @@ class ListPackagesRequest(proto.Message): page_token (str): The next_page_token value returned from a previous list request, if any. + filter (str): + Optional. An expression for filtering the results of the + request. Filter rules are case insensitive. The fields + eligible for filtering are: + + - ``name`` + - ``annotations`` + + Examples of using a filter: + + To filter the results of your request to packages with the + name ``my-package`` in project ``my-project`` in the + ``us-central`` region, in repository ``my-repo``, append the + following filter expression to your request: + + - ``name="projects/my-project/locations/us-central1/repositories/my-repo/packages/my-package"`` + + You can also use wildcards to match any number of characters + before or after the value: + + - ``name="projects/my-project/locations/us-central1/repositories/my-repo/packages/my-*"`` + - ``name="projects/my-project/locations/us-central1/repositories/my-repo/packages/*package"`` + - ``name="projects/my-project/locations/us-central1/repositories/my-repo/packages/*pack*"`` + + To filter the results of your request to packages with the + annotation key-value pair [``external_link``: + ``external_link_value``], append the following filter + expression to your request": + + - ``"annotations.external_link:external_link_value"`` + + To filter the results just for a specific annotation key + ``external_link``, append the following filter expression to + your request: + + - ``"annotations.external_link"`` + + If the annotation key or value contains special characters, + you can escape them by surrounding the value with backticks. 
+ For example, to filter the results of your request to + packages with the annotation key-value pair + [``external.link``:``https://example.com/my-package``], + append the following filter expression to your request: + + - :literal:`"annotations.`external.link`:`https://example.com/my-package`"` + + You can also filter with annotations with a wildcard to + match any number of characters before or after the value: + + - :literal:`"annotations.*_link:`*example.com*`"` + order_by (str): + Optional. The field to order the results by. """ parent: str = proto.Field( @@ -98,6 +159,14 @@ class ListPackagesRequest(proto.Message): proto.STRING, number=3, ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) class ListPackagesResponse(proto.Message): @@ -156,4 +225,29 @@ class DeletePackageRequest(proto.Message): ) +class UpdatePackageRequest(proto.Message): + r"""The request to update a package. + + Attributes: + package (google.cloud.artifactregistry_v1.types.Package): + The package that replaces the resource on the + server. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + The update mask applies to the resource. 
For the + ``FieldMask`` definition, see + https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask + """ + + package: "Package" = proto.Field( + proto.MESSAGE, + number=1, + message="Package", + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/types/repository.py b/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/types/repository.py index 7dad052e30d8..74e18ace3e93 100644 --- a/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/types/repository.py +++ b/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/types/repository.py @@ -304,12 +304,22 @@ class RemoteRepositoryConfig(proto.Message): Specific settings for a Yum remote repository. + This field is a member of `oneof`_ ``remote_source``. + common_repository (google.cloud.artifactregistry_v1.types.RemoteRepositoryConfig.CommonRemoteRepository): + Common remote repository settings. + Used as the remote repository upstream URL. + This field is a member of `oneof`_ ``remote_source``. description (str): The description of the remote source. upstream_credentials (google.cloud.artifactregistry_v1.types.RemoteRepositoryConfig.UpstreamCredentials): Optional. The credentials used to access the remote repository. + disable_upstream_validation (bool): + Input only. A create/update remote repo + option to avoid making a HEAD/GET request to + validate a remote repo and any supplied upstream + credentials. """ class UpstreamCredentials(proto.Message): @@ -356,6 +366,11 @@ class UsernamePasswordCredentials(proto.Message): class DockerRepository(proto.Message): r"""Configuration for a Docker remote repository. + This message has `oneof`_ fields (mutually exclusive fields). 
+ For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields Attributes: @@ -363,6 +378,10 @@ class DockerRepository(proto.Message): One of the publicly available Docker repositories supported by Artifact Registry. + This field is a member of `oneof`_ ``upstream``. + custom_repository (google.cloud.artifactregistry_v1.types.RemoteRepositoryConfig.DockerRepository.CustomRepository): + Customer-specified remote repository. + This field is a member of `oneof`_ ``upstream``. """ @@ -379,16 +398,42 @@ class PublicRepository(proto.Enum): PUBLIC_REPOSITORY_UNSPECIFIED = 0 DOCKER_HUB = 1 + class CustomRepository(proto.Message): + r"""Customer-specified publicly available remote repository. + + Attributes: + uri (str): + An http/https uri reference to the custom + remote repository, for ex: + "https://registry-1.docker.io". + """ + + uri: str = proto.Field( + proto.STRING, + number=1, + ) + public_repository: "RemoteRepositoryConfig.DockerRepository.PublicRepository" = proto.Field( proto.ENUM, number=1, oneof="upstream", enum="RemoteRepositoryConfig.DockerRepository.PublicRepository", ) + custom_repository: "RemoteRepositoryConfig.DockerRepository.CustomRepository" = proto.Field( + proto.MESSAGE, + number=3, + oneof="upstream", + message="RemoteRepositoryConfig.DockerRepository.CustomRepository", + ) class MavenRepository(proto.Message): r"""Configuration for a Maven remote repository. + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields Attributes: @@ -396,6 +441,10 @@ class MavenRepository(proto.Message): One of the publicly available Maven repositories supported by Artifact Registry. + This field is a member of `oneof`_ ``upstream``. + custom_repository (google.cloud.artifactregistry_v1.types.RemoteRepositoryConfig.MavenRepository.CustomRepository): + Customer-specified remote repository. + This field is a member of `oneof`_ ``upstream``. """ @@ -412,6 +461,21 @@ class PublicRepository(proto.Enum): PUBLIC_REPOSITORY_UNSPECIFIED = 0 MAVEN_CENTRAL = 1 + class CustomRepository(proto.Message): + r"""Customer-specified publicly available remote repository. + + Attributes: + uri (str): + An http/https uri reference to the upstream + remote repository, for ex: + "https://my.maven.registry/". + """ + + uri: str = proto.Field( + proto.STRING, + number=1, + ) + public_repository: "RemoteRepositoryConfig.MavenRepository.PublicRepository" = ( proto.Field( proto.ENUM, @@ -420,10 +484,23 @@ class PublicRepository(proto.Enum): enum="RemoteRepositoryConfig.MavenRepository.PublicRepository", ) ) + custom_repository: "RemoteRepositoryConfig.MavenRepository.CustomRepository" = ( + proto.Field( + proto.MESSAGE, + number=3, + oneof="upstream", + message="RemoteRepositoryConfig.MavenRepository.CustomRepository", + ) + ) class NpmRepository(proto.Message): r"""Configuration for a Npm remote repository. + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields Attributes: @@ -431,6 +508,10 @@ class NpmRepository(proto.Message): One of the publicly available Npm repositories supported by Artifact Registry. + This field is a member of `oneof`_ ``upstream``. + custom_repository (google.cloud.artifactregistry_v1.types.RemoteRepositoryConfig.NpmRepository.CustomRepository): + Customer-specified remote repository. + This field is a member of `oneof`_ ``upstream``. """ @@ -447,6 +528,21 @@ class PublicRepository(proto.Enum): PUBLIC_REPOSITORY_UNSPECIFIED = 0 NPMJS = 1 + class CustomRepository(proto.Message): + r"""Customer-specified publicly available remote repository. + + Attributes: + uri (str): + An http/https uri reference to the upstream + remote repository, for ex: + "https://my.npm.registry/". + """ + + uri: str = proto.Field( + proto.STRING, + number=1, + ) + public_repository: "RemoteRepositoryConfig.NpmRepository.PublicRepository" = ( proto.Field( proto.ENUM, @@ -455,10 +551,23 @@ class PublicRepository(proto.Enum): enum="RemoteRepositoryConfig.NpmRepository.PublicRepository", ) ) + custom_repository: "RemoteRepositoryConfig.NpmRepository.CustomRepository" = ( + proto.Field( + proto.MESSAGE, + number=3, + oneof="upstream", + message="RemoteRepositoryConfig.NpmRepository.CustomRepository", + ) + ) class PythonRepository(proto.Message): r"""Configuration for a Python remote repository. + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields Attributes: @@ -466,6 +575,10 @@ class PythonRepository(proto.Message): One of the publicly available Python repositories supported by Artifact Registry. 
+ This field is a member of `oneof`_ ``upstream``. + custom_repository (google.cloud.artifactregistry_v1.types.RemoteRepositoryConfig.PythonRepository.CustomRepository): + Customer-specified remote repository. + This field is a member of `oneof`_ ``upstream``. """ @@ -482,16 +595,42 @@ class PublicRepository(proto.Enum): PUBLIC_REPOSITORY_UNSPECIFIED = 0 PYPI = 1 + class CustomRepository(proto.Message): + r"""Customer-specified publicly available remote repository. + + Attributes: + uri (str): + An http/https uri reference to the upstream + remote repository, for ex: + "https://my.python.registry/". + """ + + uri: str = proto.Field( + proto.STRING, + number=1, + ) + public_repository: "RemoteRepositoryConfig.PythonRepository.PublicRepository" = proto.Field( proto.ENUM, number=1, oneof="upstream", enum="RemoteRepositoryConfig.PythonRepository.PublicRepository", ) + custom_repository: "RemoteRepositoryConfig.PythonRepository.CustomRepository" = proto.Field( + proto.MESSAGE, + number=3, + oneof="upstream", + message="RemoteRepositoryConfig.PythonRepository.CustomRepository", + ) class AptRepository(proto.Message): r"""Configuration for an Apt remote repository. + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields Attributes: @@ -499,6 +638,10 @@ class AptRepository(proto.Message): One of the publicly available Apt repositories supported by Artifact Registry. + This field is a member of `oneof`_ ``upstream``. + custom_repository (google.cloud.artifactregistry_v1.types.RemoteRepositoryConfig.AptRepository.CustomRepository): + Customer-specified remote repository. + This field is a member of `oneof`_ ``upstream``. 
""" @@ -525,10 +668,13 @@ class RepositoryBase(proto.Enum): Debian. UBUNTU (2): Ubuntu LTS/Pro. + DEBIAN_SNAPSHOT (3): + Archived Debian. """ REPOSITORY_BASE_UNSPECIFIED = 0 DEBIAN = 1 UBUNTU = 2 + DEBIAN_SNAPSHOT = 3 repository_base: "RemoteRepositoryConfig.AptRepository.PublicRepository.RepositoryBase" = proto.Field( proto.ENUM, @@ -540,6 +686,21 @@ class RepositoryBase(proto.Enum): number=2, ) + class CustomRepository(proto.Message): + r"""Customer-specified publicly available remote repository. + + Attributes: + uri (str): + An http/https uri reference to the upstream + remote repository, for ex: + "https://my.apt.registry/". + """ + + uri: str = proto.Field( + proto.STRING, + number=1, + ) + public_repository: "RemoteRepositoryConfig.AptRepository.PublicRepository" = ( proto.Field( proto.MESSAGE, @@ -548,10 +709,23 @@ class RepositoryBase(proto.Enum): message="RemoteRepositoryConfig.AptRepository.PublicRepository", ) ) + custom_repository: "RemoteRepositoryConfig.AptRepository.CustomRepository" = ( + proto.Field( + proto.MESSAGE, + number=3, + oneof="upstream", + message="RemoteRepositoryConfig.AptRepository.CustomRepository", + ) + ) class YumRepository(proto.Message): r"""Configuration for a Yum remote repository. + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields Attributes: @@ -559,6 +733,10 @@ class YumRepository(proto.Message): One of the publicly available Yum repositories supported by Artifact Registry. + This field is a member of `oneof`_ ``upstream``. + custom_repository (google.cloud.artifactregistry_v1.types.RemoteRepositoryConfig.YumRepository.CustomRepository): + Customer-specified remote repository. 
+ This field is a member of `oneof`_ ``upstream``. """ @@ -613,6 +791,21 @@ class RepositoryBase(proto.Enum): number=2, ) + class CustomRepository(proto.Message): + r"""Customer-specified publicly available remote repository. + + Attributes: + uri (str): + An http/https uri reference to the upstream + remote repository, for ex: + "https://my.yum.registry/". + """ + + uri: str = proto.Field( + proto.STRING, + number=1, + ) + public_repository: "RemoteRepositoryConfig.YumRepository.PublicRepository" = ( proto.Field( proto.MESSAGE, @@ -621,6 +814,28 @@ class RepositoryBase(proto.Enum): message="RemoteRepositoryConfig.YumRepository.PublicRepository", ) ) + custom_repository: "RemoteRepositoryConfig.YumRepository.CustomRepository" = ( + proto.Field( + proto.MESSAGE, + number=3, + oneof="upstream", + message="RemoteRepositoryConfig.YumRepository.CustomRepository", + ) + ) + + class CommonRemoteRepository(proto.Message): + r"""Common remote repository settings type. + + Attributes: + uri (str): + Required. A common public repository base for + remote repository. + """ + + uri: str = proto.Field( + proto.STRING, + number=1, + ) docker_repository: DockerRepository = proto.Field( proto.MESSAGE, @@ -658,6 +873,12 @@ class RepositoryBase(proto.Enum): oneof="remote_source", message=YumRepository, ) + common_repository: CommonRemoteRepository = proto.Field( + proto.MESSAGE, + number=14, + oneof="remote_source", + message=CommonRemoteRepository, + ) description: str = proto.Field( proto.STRING, number=1, @@ -667,6 +888,10 @@ class RepositoryBase(proto.Enum): number=9, message=UpstreamCredentials, ) + disable_upstream_validation: bool = proto.Field( + proto.BOOL, + number=12, + ) class Repository(proto.Message): @@ -705,6 +930,8 @@ class Repository(proto.Message): name (str): The name of the repository, for example: ``projects/p1/locations/us-central1/repositories/repo1``. + For each location in a project, repository names must be + unique. 
format_ (google.cloud.artifactregistry_v1.types.Repository.Format): Optional. The format of packages that are stored in the repository. @@ -755,6 +982,16 @@ class Repository(proto.Message): Optional. If true, the cleanup pipeline is prevented from deleting versions in this repository. + vulnerability_scanning_config (google.cloud.artifactregistry_v1.types.Repository.VulnerabilityScanningConfig): + Optional. Config and state for vulnerability + scanning of resources within this Repository. + disallow_unspecified_mode (bool): + Optional. If this is true, an unspecified + repo type will be treated as error rather than + defaulting to standard. + satisfies_pzi (bool): + Output only. If set, the repository satisfies + physical zone isolation. """ class Format(proto.Enum): @@ -779,6 +1016,8 @@ class Format(proto.Enum): Kubeflow Pipelines package format. GO (10): Go package format. + GENERIC (11): + Generic package format. """ FORMAT_UNSPECIFIED = 0 DOCKER = 1 @@ -789,6 +1028,7 @@ class Format(proto.Enum): PYTHON = 8 KFP = 9 GO = 10 + GENERIC = 11 class Mode(proto.Enum): r"""The mode configures the repository to serve artifacts from @@ -872,6 +1112,91 @@ class DockerRepositoryConfig(proto.Message): number=1, ) + class VulnerabilityScanningConfig(proto.Message): + r"""Config on whether to perform vulnerability scanning for + resources in this repository, as well as output fields + describing current state. + + Attributes: + enablement_config (google.cloud.artifactregistry_v1.types.Repository.VulnerabilityScanningConfig.EnablementConfig): + Optional. Config for whether this repository + has vulnerability scanning disabled. + last_enable_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The last time this repository + config was enabled. + enablement_state (google.cloud.artifactregistry_v1.types.Repository.VulnerabilityScanningConfig.EnablementState): + Output only. State of feature enablement, + combining repository enablement config and API + enablement state. 
+ enablement_state_reason (str): + Output only. Reason for the repository state. + """ + + class EnablementConfig(proto.Enum): + r"""Config for vulnerability scanning of resources in this + repository. + + Values: + ENABLEMENT_CONFIG_UNSPECIFIED (0): + Not set. This will be treated as INHERITED. + INHERITED (1): + Scanning is Enabled, but dependent on API + enablement. + DISABLED (2): + No automatic vulnerability scanning will be + performed for this repository. + """ + ENABLEMENT_CONFIG_UNSPECIFIED = 0 + INHERITED = 1 + DISABLED = 2 + + class EnablementState(proto.Enum): + r"""Describes the state of vulnerability scanning in this + repository, including both repository enablement and API + enablement. + + Values: + ENABLEMENT_STATE_UNSPECIFIED (0): + Enablement state is unclear. + SCANNING_UNSUPPORTED (1): + Repository does not support vulnerability + scanning. + SCANNING_DISABLED (2): + Vulnerability scanning is disabled for this + repository. + SCANNING_ACTIVE (3): + Vulnerability scanning is active for this + repository. 
+ """ + ENABLEMENT_STATE_UNSPECIFIED = 0 + SCANNING_UNSUPPORTED = 1 + SCANNING_DISABLED = 2 + SCANNING_ACTIVE = 3 + + enablement_config: "Repository.VulnerabilityScanningConfig.EnablementConfig" = ( + proto.Field( + proto.ENUM, + number=1, + enum="Repository.VulnerabilityScanningConfig.EnablementConfig", + ) + ) + last_enable_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + enablement_state: "Repository.VulnerabilityScanningConfig.EnablementState" = ( + proto.Field( + proto.ENUM, + number=3, + enum="Repository.VulnerabilityScanningConfig.EnablementState", + ) + ) + enablement_state_reason: str = proto.Field( + proto.STRING, + number=4, + ) + maven_config: MavenRepositoryConfig = proto.Field( proto.MESSAGE, number=9, @@ -951,6 +1276,19 @@ class DockerRepositoryConfig(proto.Message): proto.BOOL, number=18, ) + vulnerability_scanning_config: VulnerabilityScanningConfig = proto.Field( + proto.MESSAGE, + number=19, + message=VulnerabilityScanningConfig, + ) + disallow_unspecified_mode: bool = proto.Field( + proto.BOOL, + number=21, + ) + satisfies_pzi: bool = proto.Field( + proto.BOOL, + number=22, + ) class ListRepositoriesRequest(proto.Message): @@ -966,6 +1304,30 @@ class ListRepositoriesRequest(proto.Message): page_token (str): The next_page_token value returned from a previous list request, if any. + filter (str): + Optional. An expression for filtering the results of the + request. Filter rules are case insensitive. 
The fields + eligible for filtering are: + + - ``name`` + + Examples of using a filter: + + To filter the results of your request to repositories with + the name ``my-repo`` in project ``my-project`` in the + ``us-central`` region, append the following filter + expression to your request: + + - ``name="projects/my-project/locations/us-central1/repositories/my-repo"`` + + You can also use wildcards to match any number of characters + before or after the value: + + - ``name="projects/my-project/locations/us-central1/repositories/my-*"`` + - ``name="projects/my-project/locations/us-central1/repositories/*repo"`` + - ``name="projects/my-project/locations/us-central1/repositories/*repo*"`` + order_by (str): + Optional. The field to order the results by. """ parent: str = proto.Field( @@ -980,6 +1342,14 @@ class ListRepositoriesRequest(proto.Message): proto.STRING, number=3, ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) class ListRepositoriesResponse(proto.Message): diff --git a/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/types/rule.py b/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/types/rule.py new file mode 100644 index 000000000000..c436dd859ea8 --- /dev/null +++ b/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/types/rule.py @@ -0,0 +1,251 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import field_mask_pb2 # type: ignore +from google.type import expr_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.devtools.artifactregistry.v1", + manifest={ + "Rule", + "ListRulesRequest", + "ListRulesResponse", + "GetRuleRequest", + "CreateRuleRequest", + "UpdateRuleRequest", + "DeleteRuleRequest", + }, +) + + +class Rule(proto.Message): + r"""A rule defines the deny or allow action of the operation it + applies to and the conditions required for the rule to apply. + You can set one rule for an entire repository and one rule for + each package within. + + Attributes: + name (str): + The name of the rule, for example: + ``projects/p1/locations/us-central1/repositories/repo1/rules/rule1``. + action (google.cloud.artifactregistry_v1.types.Rule.Action): + The action this rule takes. + operation (google.cloud.artifactregistry_v1.types.Rule.Operation): + + condition (google.type.expr_pb2.Expr): + Optional. A CEL expression for conditions + that must be met in order for the rule to apply. + If not provided, the rule matches all objects. + package_id (str): + The package ID the rule applies to. + If empty, this rule applies to all packages + inside the repository. + """ + + class Action(proto.Enum): + r"""Defines the action of the rule. + + Values: + ACTION_UNSPECIFIED (0): + Action not specified. + ALLOW (1): + Allow the operation. + DENY (2): + Deny the operation. + """ + ACTION_UNSPECIFIED = 0 + ALLOW = 1 + DENY = 2 + + class Operation(proto.Enum): + r"""The operation the rule applies to. + + Values: + OPERATION_UNSPECIFIED (0): + Operation not specified. + DOWNLOAD (1): + Download operation. 
+ """ + OPERATION_UNSPECIFIED = 0 + DOWNLOAD = 1 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + action: Action = proto.Field( + proto.ENUM, + number=2, + enum=Action, + ) + operation: Operation = proto.Field( + proto.ENUM, + number=3, + enum=Operation, + ) + condition: expr_pb2.Expr = proto.Field( + proto.MESSAGE, + number=4, + message=expr_pb2.Expr, + ) + package_id: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListRulesRequest(proto.Message): + r"""The request to list rules. + + Attributes: + parent (str): + Required. The name of the parent repository whose rules will + be listed. For example: + ``projects/p1/locations/us-central1/repositories/repo1``. + page_size (int): + The maximum number of rules to return. + Maximum page size is 1,000. + page_token (str): + The next_page_token value returned from a previous list + request, if any. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListRulesResponse(proto.Message): + r"""The response from listing rules. + + Attributes: + rules (MutableSequence[google.cloud.artifactregistry_v1.types.Rule]): + The rules returned. + next_page_token (str): + The token to retrieve the next page of rules, + or empty if there are no more rules to return. + """ + + @property + def raw_page(self): + return self + + rules: MutableSequence["Rule"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Rule", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class GetRuleRequest(proto.Message): + r"""The request to retrieve a rule. + + Attributes: + name (str): + Required. The name of the rule to retrieve. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateRuleRequest(proto.Message): + r"""The request to create a new rule. + + Attributes: + parent (str): + Required. 
The name of the parent resource + where the rule will be created. + rule_id (str): + The rule id to use for this repository. + rule (google.cloud.artifactregistry_v1.types.Rule): + The rule to be created. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + rule_id: str = proto.Field( + proto.STRING, + number=2, + ) + rule: "Rule" = proto.Field( + proto.MESSAGE, + number=3, + message="Rule", + ) + + +class UpdateRuleRequest(proto.Message): + r"""The request to update a rule. + + Attributes: + rule (google.cloud.artifactregistry_v1.types.Rule): + The rule that replaces the resource on the + server. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + The update mask applies to the resource. For the + ``FieldMask`` definition, see + https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask + """ + + rule: "Rule" = proto.Field( + proto.MESSAGE, + number=1, + message="Rule", + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class DeleteRuleRequest(proto.Message): + r"""The request to delete a rule. + + Attributes: + name (str): + Required. The name of the rule to delete. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/types/settings.py b/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/types/settings.py index c05639001209..642896833e18 100644 --- a/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/types/settings.py +++ b/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/types/settings.py @@ -46,6 +46,9 @@ class ProjectSettings(proto.Message): legacy_redirection_state (google.cloud.artifactregistry_v1.types.ProjectSettings.RedirectionState): The redirection state of the legacy repositories in this project. + pull_percent (int): + The percentage of pull traffic to redirect + from GCR to AR when using partial redirection. """ class RedirectionState(proto.Enum): @@ -61,11 +64,19 @@ class RedirectionState(proto.Enum): REDIRECTION_FROM_GCR_IO_FINALIZED (3): Redirection is enabled, and has been finalized so cannot be reverted. 
+ REDIRECTION_FROM_GCR_IO_ENABLED_AND_COPYING (5): + Redirection is enabled and missing images are + copied from GCR + REDIRECTION_FROM_GCR_IO_PARTIAL_AND_COPYING (6): + Redirection is partially enabled and missing + images are copied from GCR """ REDIRECTION_STATE_UNSPECIFIED = 0 REDIRECTION_FROM_GCR_IO_DISABLED = 1 REDIRECTION_FROM_GCR_IO_ENABLED = 2 REDIRECTION_FROM_GCR_IO_FINALIZED = 3 + REDIRECTION_FROM_GCR_IO_ENABLED_AND_COPYING = 5 + REDIRECTION_FROM_GCR_IO_PARTIAL_AND_COPYING = 6 name: str = proto.Field( proto.STRING, @@ -76,6 +87,10 @@ class RedirectionState(proto.Enum): number=2, enum=RedirectionState, ) + pull_percent: int = proto.Field( + proto.INT32, + number=3, + ) class GetProjectSettingsRequest(proto.Message): diff --git a/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/types/tag.py b/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/types/tag.py index 96774bfd453d..62236adbcecf 100644 --- a/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/types/tag.py +++ b/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/types/tag.py @@ -46,11 +46,10 @@ class Tag(proto.Message): escaped. The tag part can only have characters in [a-zA-Z0-9-._~:@], anything else must be URL encoded. version (str): - The name of the version the tag refers to, - for example: - "projects/p1/locations/us-central1/repositories/repo1/packages/pkg1/versions/sha256:5243811" - If the package or version ID parts contain - slashes, the slashes are escaped. + The name of the version the tag refers to, for example: + ``projects/p1/locations/us-central1/repositories/repo1/packages/pkg1/versions/sha256:5243811`` + If the package or version ID parts contain slashes, the + slashes are escaped. """ name: str = proto.Field( @@ -76,16 +75,34 @@ class ListTagsRequest(proto.Message): Filter rules are case insensitive. 
The fields eligible for filtering are: + - ``name`` - ``version`` - An example of using a filter: + Examples of using a filter: - - ``version="projects/p1/locations/us-central1/repositories/repo1/packages/pkg1/versions/1.0"`` - --> Tags that are applied to the version ``1.0`` in - package ``pkg1``. + To filter the results of your request to tags with the name + ``my-tag`` in package ``my-package`` in repository + ``my-repo`` in project ``my-project`` in the us-central + region, append the following filter expression to your + request: + + - ``name="projects/my-project/locations/us-central1/repositories/my-repo/packages/my-package/tags/my-tag"`` + + You can also use wildcards to match any number of characters + before or after the value: + + - ``name="projects/my-project/locations/us-central1/repositories/my-repo/packages/my-package/tags/my*"`` + - ``name="projects/my-project/locations/us-central1/repositories/my-repo/packages/my-package/tags/*tag"`` + - ``name="projects/my-project/locations/us-central1/repositories/my-repo/packages/my-package/tags/*tag*"`` + + To filter the results of your request to tags applied to the + version ``1.0`` in package ``my-package``, append the + following filter expression to your request: + + - ``version="projects/my-project/locations/us-central1/repositories/my-repo/packages/my-package/versions/1.0"`` page_size (int): The maximum number of tags to return. Maximum - page size is 10,000. + page size is 1,000. page_token (str): The next_page_token value returned from a previous list request, if any.
diff --git a/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/types/version.py b/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/types/version.py index 01ddc755b786..de1064f1cb9f 100644 --- a/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/types/version.py +++ b/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/types/version.py @@ -17,6 +17,7 @@ from typing import MutableMapping, MutableSequence +from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import struct_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore import proto # type: ignore @@ -34,6 +35,7 @@ "DeleteVersionRequest", "BatchDeleteVersionsRequest", "BatchDeleteVersionsMetadata", + "UpdateVersionRequest", }, ) @@ -65,10 +67,9 @@ class Version(proto.Message): Attributes: name (str): The name of the version, for example: - - "projects/p1/locations/us-central1/repositories/repo1/packages/pkg1/versions/art1". - If the package or version ID parts contain - slashes, the slashes are escaped. + ``projects/p1/locations/us-central1/repositories/repo1/packages/pkg1/versions/art1``. + If the package or version ID parts contain slashes, the + slashes are escaped. description (str): Optional. Description of the version, as specified in its metadata. @@ -87,6 +88,8 @@ class Version(proto.Message): resources could be: [DockerImage][google.devtools.artifactregistry.v1.DockerImage] [MavenArtifact][google.devtools.artifactregistry.v1.MavenArtifact] + annotations (MutableMapping[str, str]): + Optional. Client specified annotations. """ name: str = proto.Field( @@ -117,6 +120,11 @@ class Version(proto.Message): number=8, message=struct_pb2.Struct, ) + annotations: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=9, + ) class ListVersionsRequest(proto.Message): @@ -137,6 +145,56 @@ class ListVersionsRequest(proto.Message): response. 
order_by (str): Optional. The field to order the results by. + filter (str): + Optional. An expression for filtering the results of the + request. Filter rules are case insensitive. The fields + eligible for filtering are: + + - ``name`` + - ``annotations`` + + Examples of using a filter: + + To filter the results of your request to versions with the + name ``my-version`` in project ``my-project`` in the + ``us-central`` region, in repository ``my-repo``, append the + following filter expression to your request: + + - ``name="projects/my-project/locations/us-central1/repositories/my-repo/packages/my-package/versions/my-version"`` + + You can also use wildcards to match any number of characters + before or after the value: + + - ``name="projects/my-project/locations/us-central1/repositories/my-repo/packages/my-package/versions/*version"`` + - ``name="projects/my-project/locations/us-central1/repositories/my-repo/packages/my-package/versions/my*"`` + - ``name="projects/my-project/locations/us-central1/repositories/my-repo/packages/my-package/versions/*version*"`` + + To filter the results of your request to versions with the + annotation key-value pair [``external_link``: + ``external_link_value``], append the following filter + expression to your request: + + - ``"annotations.external_link:external_link_value"`` + + To filter just for a specific annotation key + ``external_link``, append the following filter expression to + your request: + + - ``"annotations.external_link"`` + + If the annotation key or value contains special characters, + you can escape them by surrounding the value with backticks. 
+ For example, to filter the results of your request to + versions with the annotation key-value pair + [``external.link``:``https://example.com/my-version``], + append the following filter expression to your request: + + - :literal:`"annotations.`external.link`:`https://example.com/my-version`"` + + You can also filter with annotations with a wildcard to + match any number of characters before or after the value: + + - :literal:`"annotations.*_link:`*example.com*`"` """ parent: str = proto.Field( @@ -160,6 +218,10 @@ class ListVersionsRequest(proto.Message): proto.STRING, number=5, ) + filter: str = proto.Field( + proto.STRING, + number=6, + ) class ListVersionsResponse(proto.Message): @@ -277,4 +339,29 @@ class BatchDeleteVersionsMetadata(proto.Message): ) +class UpdateVersionRequest(proto.Message): + r"""The request to update a version. + + Attributes: + version (google.cloud.artifactregistry_v1.types.Version): + Required. The Version that replaces the + resource on the server. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + The update mask applies to the resource. 
For the + ``FieldMask`` definition, see + https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask + """ + + version: "Version" = proto.Field( + proto.MESSAGE, + number=1, + message="Version", + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1beta2/gapic_version.py b/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1beta2/gapic_version.py index 739fdfae141c..0b9427f4e8a5 100644 --- a/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1beta2/gapic_version.py +++ b/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1beta2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.12.0" # {x-release-please-version} +__version__ = "1.13.1" # {x-release-please-version} diff --git a/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1beta2/services/artifact_registry/client.py b/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1beta2/services/artifact_registry/client.py index 4cc2b146421e..f696780b4097 100644 --- a/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1beta2/services/artifact_registry/client.py +++ b/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1beta2/services/artifact_registry/client.py @@ -631,36 +631,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. 
- - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = ArtifactRegistryClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -670,13 +640,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or ArtifactRegistryClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. 
+ return True @property def api_endpoint(self): diff --git a/packages/google-cloud-artifact-registry/samples/generated_samples/artifactregistry_v1_generated_artifact_registry_create_attachment_async.py b/packages/google-cloud-artifact-registry/samples/generated_samples/artifactregistry_v1_generated_artifact_registry_create_attachment_async.py new file mode 100644 index 000000000000..d3ad6684f5a3 --- /dev/null +++ b/packages/google-cloud-artifact-registry/samples/generated_samples/artifactregistry_v1_generated_artifact_registry_create_attachment_async.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateAttachment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-artifact-registry + + +# [START artifactregistry_v1_generated_ArtifactRegistry_CreateAttachment_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import artifactregistry_v1 + + +async def sample_create_attachment(): + # Create a client + client = artifactregistry_v1.ArtifactRegistryAsyncClient() + + # Initialize request argument(s) + attachment = artifactregistry_v1.Attachment() + attachment.target = "target_value" + attachment.files = ['files_value1', 'files_value2'] + + request = artifactregistry_v1.CreateAttachmentRequest( + parent="parent_value", + attachment_id="attachment_id_value", + attachment=attachment, + ) + + # Make the request + operation = client.create_attachment(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END artifactregistry_v1_generated_ArtifactRegistry_CreateAttachment_async] diff --git a/packages/google-cloud-artifact-registry/samples/generated_samples/artifactregistry_v1_generated_artifact_registry_create_attachment_sync.py b/packages/google-cloud-artifact-registry/samples/generated_samples/artifactregistry_v1_generated_artifact_registry_create_attachment_sync.py new file mode 100644 index 000000000000..d5fad5c6844c --- /dev/null +++ b/packages/google-cloud-artifact-registry/samples/generated_samples/artifactregistry_v1_generated_artifact_registry_create_attachment_sync.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateAttachment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-artifact-registry + + +# [START artifactregistry_v1_generated_ArtifactRegistry_CreateAttachment_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import artifactregistry_v1 + + +def sample_create_attachment(): + # Create a client + client = artifactregistry_v1.ArtifactRegistryClient() + + # Initialize request argument(s) + attachment = artifactregistry_v1.Attachment() + attachment.target = "target_value" + attachment.files = ['files_value1', 'files_value2'] + + request = artifactregistry_v1.CreateAttachmentRequest( + parent="parent_value", + attachment_id="attachment_id_value", + attachment=attachment, + ) + + # Make the request + operation = client.create_attachment(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END artifactregistry_v1_generated_ArtifactRegistry_CreateAttachment_sync] diff --git a/packages/google-cloud-artifact-registry/samples/generated_samples/artifactregistry_v1_generated_artifact_registry_create_rule_async.py b/packages/google-cloud-artifact-registry/samples/generated_samples/artifactregistry_v1_generated_artifact_registry_create_rule_async.py new file mode 100644 index 000000000000..4b582600ee4e --- /dev/null +++ b/packages/google-cloud-artifact-registry/samples/generated_samples/artifactregistry_v1_generated_artifact_registry_create_rule_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateRule +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-artifact-registry + + +# [START artifactregistry_v1_generated_ArtifactRegistry_CreateRule_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import artifactregistry_v1 + + +async def sample_create_rule(): + # Create a client + client = artifactregistry_v1.ArtifactRegistryAsyncClient() + + # Initialize request argument(s) + request = artifactregistry_v1.CreateRuleRequest( + parent="parent_value", + ) + + # Make the request + response = await client.create_rule(request=request) + + # Handle the response + print(response) + +# [END artifactregistry_v1_generated_ArtifactRegistry_CreateRule_async] diff --git a/packages/google-cloud-artifact-registry/samples/generated_samples/artifactregistry_v1_generated_artifact_registry_create_rule_sync.py b/packages/google-cloud-artifact-registry/samples/generated_samples/artifactregistry_v1_generated_artifact_registry_create_rule_sync.py new file mode 100644 index 000000000000..96c32e9b05f1 --- /dev/null +++ b/packages/google-cloud-artifact-registry/samples/generated_samples/artifactregistry_v1_generated_artifact_registry_create_rule_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateRule +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-artifact-registry + + +# [START artifactregistry_v1_generated_ArtifactRegistry_CreateRule_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import artifactregistry_v1 + + +def sample_create_rule(): + # Create a client + client = artifactregistry_v1.ArtifactRegistryClient() + + # Initialize request argument(s) + request = artifactregistry_v1.CreateRuleRequest( + parent="parent_value", + ) + + # Make the request + response = client.create_rule(request=request) + + # Handle the response + print(response) + +# [END artifactregistry_v1_generated_ArtifactRegistry_CreateRule_sync] diff --git a/packages/google-cloud-artifact-registry/samples/generated_samples/artifactregistry_v1_generated_artifact_registry_delete_attachment_async.py b/packages/google-cloud-artifact-registry/samples/generated_samples/artifactregistry_v1_generated_artifact_registry_delete_attachment_async.py new file mode 100644 index 000000000000..bd3b1ab4902a --- /dev/null +++ b/packages/google-cloud-artifact-registry/samples/generated_samples/artifactregistry_v1_generated_artifact_registry_delete_attachment_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteAttachment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-artifact-registry + + +# [START artifactregistry_v1_generated_ArtifactRegistry_DeleteAttachment_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import artifactregistry_v1 + + +async def sample_delete_attachment(): + # Create a client + client = artifactregistry_v1.ArtifactRegistryAsyncClient() + + # Initialize request argument(s) + request = artifactregistry_v1.DeleteAttachmentRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_attachment(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END artifactregistry_v1_generated_ArtifactRegistry_DeleteAttachment_async] diff --git a/packages/google-cloud-artifact-registry/samples/generated_samples/artifactregistry_v1_generated_artifact_registry_delete_attachment_sync.py b/packages/google-cloud-artifact-registry/samples/generated_samples/artifactregistry_v1_generated_artifact_registry_delete_attachment_sync.py new file mode 100644 index 000000000000..2e79dc95b0af --- /dev/null +++ b/packages/google-cloud-artifact-registry/samples/generated_samples/artifactregistry_v1_generated_artifact_registry_delete_attachment_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for DeleteAttachment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-artifact-registry + + +# [START artifactregistry_v1_generated_ArtifactRegistry_DeleteAttachment_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import artifactregistry_v1 + + +def sample_delete_attachment(): + # Create a client + client = artifactregistry_v1.ArtifactRegistryClient() + + # Initialize request argument(s) + request = artifactregistry_v1.DeleteAttachmentRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_attachment(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END artifactregistry_v1_generated_ArtifactRegistry_DeleteAttachment_sync] diff --git a/packages/google-cloud-artifact-registry/samples/generated_samples/artifactregistry_v1_generated_artifact_registry_delete_file_async.py b/packages/google-cloud-artifact-registry/samples/generated_samples/artifactregistry_v1_generated_artifact_registry_delete_file_async.py new file mode 100644 index 000000000000..8d0bf7ff67cc --- /dev/null +++ b/packages/google-cloud-artifact-registry/samples/generated_samples/artifactregistry_v1_generated_artifact_registry_delete_file_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 
(the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteFile +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-artifact-registry + + +# [START artifactregistry_v1_generated_ArtifactRegistry_DeleteFile_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import artifactregistry_v1 + + +async def sample_delete_file(): + # Create a client + client = artifactregistry_v1.ArtifactRegistryAsyncClient() + + # Initialize request argument(s) + request = artifactregistry_v1.DeleteFileRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_file(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END artifactregistry_v1_generated_ArtifactRegistry_DeleteFile_async] diff --git a/packages/google-cloud-artifact-registry/samples/generated_samples/artifactregistry_v1_generated_artifact_registry_delete_file_sync.py b/packages/google-cloud-artifact-registry/samples/generated_samples/artifactregistry_v1_generated_artifact_registry_delete_file_sync.py new file mode 100644 index 000000000000..238f21adb45b --- /dev/null +++ b/packages/google-cloud-artifact-registry/samples/generated_samples/artifactregistry_v1_generated_artifact_registry_delete_file_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for DeleteFile +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-artifact-registry + + +# [START artifactregistry_v1_generated_ArtifactRegistry_DeleteFile_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import artifactregistry_v1 + + +def sample_delete_file(): + # Create a client + client = artifactregistry_v1.ArtifactRegistryClient() + + # Initialize request argument(s) + request = artifactregistry_v1.DeleteFileRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_file(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END artifactregistry_v1_generated_ArtifactRegistry_DeleteFile_sync] diff --git a/packages/google-cloud-artifact-registry/samples/generated_samples/artifactregistry_v1_generated_artifact_registry_delete_rule_async.py b/packages/google-cloud-artifact-registry/samples/generated_samples/artifactregistry_v1_generated_artifact_registry_delete_rule_async.py new file mode 100644 index 000000000000..32548808a704 --- /dev/null +++ b/packages/google-cloud-artifact-registry/samples/generated_samples/artifactregistry_v1_generated_artifact_registry_delete_rule_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use 
this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteRule +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-artifact-registry + + +# [START artifactregistry_v1_generated_ArtifactRegistry_DeleteRule_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import artifactregistry_v1 + + +async def sample_delete_rule(): + # Create a client + client = artifactregistry_v1.ArtifactRegistryAsyncClient() + + # Initialize request argument(s) + request = artifactregistry_v1.DeleteRuleRequest( + name="name_value", + ) + + # Make the request + await client.delete_rule(request=request) + + +# [END artifactregistry_v1_generated_ArtifactRegistry_DeleteRule_async] diff --git a/packages/google-cloud-artifact-registry/samples/generated_samples/artifactregistry_v1_generated_artifact_registry_delete_rule_sync.py b/packages/google-cloud-artifact-registry/samples/generated_samples/artifactregistry_v1_generated_artifact_registry_delete_rule_sync.py new file mode 100644 index 000000000000..8a56824b0ffb --- /dev/null +++ b/packages/google-cloud-artifact-registry/samples/generated_samples/artifactregistry_v1_generated_artifact_registry_delete_rule_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteRule +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-artifact-registry + + +# [START artifactregistry_v1_generated_ArtifactRegistry_DeleteRule_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import artifactregistry_v1 + + +def sample_delete_rule(): + # Create a client + client = artifactregistry_v1.ArtifactRegistryClient() + + # Initialize request argument(s) + request = artifactregistry_v1.DeleteRuleRequest( + name="name_value", + ) + + # Make the request + client.delete_rule(request=request) + + +# [END artifactregistry_v1_generated_ArtifactRegistry_DeleteRule_sync] diff --git a/packages/google-cloud-artifact-registry/samples/generated_samples/artifactregistry_v1_generated_artifact_registry_get_attachment_async.py b/packages/google-cloud-artifact-registry/samples/generated_samples/artifactregistry_v1_generated_artifact_registry_get_attachment_async.py new file mode 100644 index 000000000000..7d0e0d5d7b4f --- /dev/null +++ b/packages/google-cloud-artifact-registry/samples/generated_samples/artifactregistry_v1_generated_artifact_registry_get_attachment_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetAttachment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-artifact-registry + + +# [START artifactregistry_v1_generated_ArtifactRegistry_GetAttachment_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import artifactregistry_v1 + + +async def sample_get_attachment(): + # Create a client + client = artifactregistry_v1.ArtifactRegistryAsyncClient() + + # Initialize request argument(s) + request = artifactregistry_v1.GetAttachmentRequest( + name="name_value", + ) + + # Make the request + response = await client.get_attachment(request=request) + + # Handle the response + print(response) + +# [END artifactregistry_v1_generated_ArtifactRegistry_GetAttachment_async] diff --git a/packages/google-cloud-artifact-registry/samples/generated_samples/artifactregistry_v1_generated_artifact_registry_get_attachment_sync.py b/packages/google-cloud-artifact-registry/samples/generated_samples/artifactregistry_v1_generated_artifact_registry_get_attachment_sync.py new file mode 100644 index 000000000000..e2e6d4351688 --- /dev/null +++ b/packages/google-cloud-artifact-registry/samples/generated_samples/artifactregistry_v1_generated_artifact_registry_get_attachment_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetAttachment +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-artifact-registry + + +# [START artifactregistry_v1_generated_ArtifactRegistry_GetAttachment_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import artifactregistry_v1 + + +def sample_get_attachment(): + # Create a client + client = artifactregistry_v1.ArtifactRegistryClient() + + # Initialize request argument(s) + request = artifactregistry_v1.GetAttachmentRequest( + name="name_value", + ) + + # Make the request + response = client.get_attachment(request=request) + + # Handle the response + print(response) + +# [END artifactregistry_v1_generated_ArtifactRegistry_GetAttachment_sync] diff --git a/packages/google-cloud-artifact-registry/samples/generated_samples/artifactregistry_v1_generated_artifact_registry_get_rule_async.py b/packages/google-cloud-artifact-registry/samples/generated_samples/artifactregistry_v1_generated_artifact_registry_get_rule_async.py new file mode 100644 index 000000000000..3e5fe28e350b --- /dev/null +++ b/packages/google-cloud-artifact-registry/samples/generated_samples/artifactregistry_v1_generated_artifact_registry_get_rule_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetRule +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-artifact-registry + + +# [START artifactregistry_v1_generated_ArtifactRegistry_GetRule_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import artifactregistry_v1 + + +async def sample_get_rule(): + # Create a client + client = artifactregistry_v1.ArtifactRegistryAsyncClient() + + # Initialize request argument(s) + request = artifactregistry_v1.GetRuleRequest( + name="name_value", + ) + + # Make the request + response = await client.get_rule(request=request) + + # Handle the response + print(response) + +# [END artifactregistry_v1_generated_ArtifactRegistry_GetRule_async] diff --git a/packages/google-cloud-artifact-registry/samples/generated_samples/artifactregistry_v1_generated_artifact_registry_get_rule_sync.py b/packages/google-cloud-artifact-registry/samples/generated_samples/artifactregistry_v1_generated_artifact_registry_get_rule_sync.py new file mode 100644 index 000000000000..44ac01f86f23 --- /dev/null +++ b/packages/google-cloud-artifact-registry/samples/generated_samples/artifactregistry_v1_generated_artifact_registry_get_rule_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetRule +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-artifact-registry + + +# [START artifactregistry_v1_generated_ArtifactRegistry_GetRule_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import artifactregistry_v1 + + +def sample_get_rule(): + # Create a client + client = artifactregistry_v1.ArtifactRegistryClient() + + # Initialize request argument(s) + request = artifactregistry_v1.GetRuleRequest( + name="name_value", + ) + + # Make the request + response = client.get_rule(request=request) + + # Handle the response + print(response) + +# [END artifactregistry_v1_generated_ArtifactRegistry_GetRule_sync] diff --git a/packages/google-cloud-artifact-registry/samples/generated_samples/artifactregistry_v1_generated_artifact_registry_list_attachments_async.py b/packages/google-cloud-artifact-registry/samples/generated_samples/artifactregistry_v1_generated_artifact_registry_list_attachments_async.py new file mode 100644 index 000000000000..772738a4c9d4 --- /dev/null +++ b/packages/google-cloud-artifact-registry/samples/generated_samples/artifactregistry_v1_generated_artifact_registry_list_attachments_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListAttachments +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-artifact-registry + + +# [START artifactregistry_v1_generated_ArtifactRegistry_ListAttachments_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import artifactregistry_v1 + + +async def sample_list_attachments(): + # Create a client + client = artifactregistry_v1.ArtifactRegistryAsyncClient() + + # Initialize request argument(s) + request = artifactregistry_v1.ListAttachmentsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_attachments(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END artifactregistry_v1_generated_ArtifactRegistry_ListAttachments_async] diff --git a/packages/google-cloud-artifact-registry/samples/generated_samples/artifactregistry_v1_generated_artifact_registry_list_attachments_sync.py b/packages/google-cloud-artifact-registry/samples/generated_samples/artifactregistry_v1_generated_artifact_registry_list_attachments_sync.py new file mode 100644 index 000000000000..0d6d4ff820b6 --- /dev/null +++ b/packages/google-cloud-artifact-registry/samples/generated_samples/artifactregistry_v1_generated_artifact_registry_list_attachments_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for ListAttachments +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-artifact-registry + + +# [START artifactregistry_v1_generated_ArtifactRegistry_ListAttachments_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import artifactregistry_v1 + + +def sample_list_attachments(): + # Create a client + client = artifactregistry_v1.ArtifactRegistryClient() + + # Initialize request argument(s) + request = artifactregistry_v1.ListAttachmentsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_attachments(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END artifactregistry_v1_generated_ArtifactRegistry_ListAttachments_sync] diff --git a/packages/google-cloud-artifact-registry/samples/generated_samples/artifactregistry_v1_generated_artifact_registry_list_rules_async.py b/packages/google-cloud-artifact-registry/samples/generated_samples/artifactregistry_v1_generated_artifact_registry_list_rules_async.py new file mode 100644 index 000000000000..df269c6af744 --- /dev/null +++ b/packages/google-cloud-artifact-registry/samples/generated_samples/artifactregistry_v1_generated_artifact_registry_list_rules_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in 
compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListRules +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-artifact-registry + + +# [START artifactregistry_v1_generated_ArtifactRegistry_ListRules_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import artifactregistry_v1 + + +async def sample_list_rules(): + # Create a client + client = artifactregistry_v1.ArtifactRegistryAsyncClient() + + # Initialize request argument(s) + request = artifactregistry_v1.ListRulesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_rules(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END artifactregistry_v1_generated_ArtifactRegistry_ListRules_async] diff --git a/packages/google-cloud-artifact-registry/samples/generated_samples/artifactregistry_v1_generated_artifact_registry_list_rules_sync.py b/packages/google-cloud-artifact-registry/samples/generated_samples/artifactregistry_v1_generated_artifact_registry_list_rules_sync.py new file mode 100644 index 000000000000..8d9149536243 --- /dev/null +++ b/packages/google-cloud-artifact-registry/samples/generated_samples/artifactregistry_v1_generated_artifact_registry_list_rules_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListRules +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-artifact-registry + + +# [START artifactregistry_v1_generated_ArtifactRegistry_ListRules_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import artifactregistry_v1 + + +def sample_list_rules(): + # Create a client + client = artifactregistry_v1.ArtifactRegistryClient() + + # Initialize request argument(s) + request = artifactregistry_v1.ListRulesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_rules(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END artifactregistry_v1_generated_ArtifactRegistry_ListRules_sync] diff --git a/packages/google-cloud-artifact-registry/samples/generated_samples/artifactregistry_v1_generated_artifact_registry_update_file_async.py b/packages/google-cloud-artifact-registry/samples/generated_samples/artifactregistry_v1_generated_artifact_registry_update_file_async.py new file mode 100644 index 000000000000..c4627bb63d9b --- /dev/null +++ b/packages/google-cloud-artifact-registry/samples/generated_samples/artifactregistry_v1_generated_artifact_registry_update_file_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateFile +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-artifact-registry + + +# [START artifactregistry_v1_generated_ArtifactRegistry_UpdateFile_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import artifactregistry_v1 + + +async def sample_update_file(): + # Create a client + client = artifactregistry_v1.ArtifactRegistryAsyncClient() + + # Initialize request argument(s) + request = artifactregistry_v1.UpdateFileRequest( + ) + + # Make the request + response = await client.update_file(request=request) + + # Handle the response + print(response) + +# [END artifactregistry_v1_generated_ArtifactRegistry_UpdateFile_async] diff --git a/packages/google-cloud-artifact-registry/samples/generated_samples/artifactregistry_v1_generated_artifact_registry_update_file_sync.py b/packages/google-cloud-artifact-registry/samples/generated_samples/artifactregistry_v1_generated_artifact_registry_update_file_sync.py new file mode 100644 index 000000000000..86974c9370f1 --- /dev/null +++ b/packages/google-cloud-artifact-registry/samples/generated_samples/artifactregistry_v1_generated_artifact_registry_update_file_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateFile +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-artifact-registry + + +# [START artifactregistry_v1_generated_ArtifactRegistry_UpdateFile_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import artifactregistry_v1 + + +def sample_update_file(): + # Create a client + client = artifactregistry_v1.ArtifactRegistryClient() + + # Initialize request argument(s) + request = artifactregistry_v1.UpdateFileRequest( + ) + + # Make the request + response = client.update_file(request=request) + + # Handle the response + print(response) + +# [END artifactregistry_v1_generated_ArtifactRegistry_UpdateFile_sync] diff --git a/packages/google-cloud-artifact-registry/samples/generated_samples/artifactregistry_v1_generated_artifact_registry_update_package_async.py b/packages/google-cloud-artifact-registry/samples/generated_samples/artifactregistry_v1_generated_artifact_registry_update_package_async.py new file mode 100644 index 000000000000..147913aa5d26 --- /dev/null +++ b/packages/google-cloud-artifact-registry/samples/generated_samples/artifactregistry_v1_generated_artifact_registry_update_package_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdatePackage +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-artifact-registry + + +# [START artifactregistry_v1_generated_ArtifactRegistry_UpdatePackage_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import artifactregistry_v1 + + +async def sample_update_package(): + # Create a client + client = artifactregistry_v1.ArtifactRegistryAsyncClient() + + # Initialize request argument(s) + request = artifactregistry_v1.UpdatePackageRequest( + ) + + # Make the request + response = await client.update_package(request=request) + + # Handle the response + print(response) + +# [END artifactregistry_v1_generated_ArtifactRegistry_UpdatePackage_async] diff --git a/packages/google-cloud-artifact-registry/samples/generated_samples/artifactregistry_v1_generated_artifact_registry_update_package_sync.py b/packages/google-cloud-artifact-registry/samples/generated_samples/artifactregistry_v1_generated_artifact_registry_update_package_sync.py new file mode 100644 index 000000000000..241048694f32 --- /dev/null +++ b/packages/google-cloud-artifact-registry/samples/generated_samples/artifactregistry_v1_generated_artifact_registry_update_package_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdatePackage +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-artifact-registry + + +# [START artifactregistry_v1_generated_ArtifactRegistry_UpdatePackage_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import artifactregistry_v1 + + +def sample_update_package(): + # Create a client + client = artifactregistry_v1.ArtifactRegistryClient() + + # Initialize request argument(s) + request = artifactregistry_v1.UpdatePackageRequest( + ) + + # Make the request + response = client.update_package(request=request) + + # Handle the response + print(response) + +# [END artifactregistry_v1_generated_ArtifactRegistry_UpdatePackage_sync] diff --git a/packages/google-cloud-artifact-registry/samples/generated_samples/artifactregistry_v1_generated_artifact_registry_update_rule_async.py b/packages/google-cloud-artifact-registry/samples/generated_samples/artifactregistry_v1_generated_artifact_registry_update_rule_async.py new file mode 100644 index 000000000000..7df725d61ddb --- /dev/null +++ b/packages/google-cloud-artifact-registry/samples/generated_samples/artifactregistry_v1_generated_artifact_registry_update_rule_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateRule +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-artifact-registry + + +# [START artifactregistry_v1_generated_ArtifactRegistry_UpdateRule_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import artifactregistry_v1 + + +async def sample_update_rule(): + # Create a client + client = artifactregistry_v1.ArtifactRegistryAsyncClient() + + # Initialize request argument(s) + request = artifactregistry_v1.UpdateRuleRequest( + ) + + # Make the request + response = await client.update_rule(request=request) + + # Handle the response + print(response) + +# [END artifactregistry_v1_generated_ArtifactRegistry_UpdateRule_async] diff --git a/packages/google-cloud-artifact-registry/samples/generated_samples/artifactregistry_v1_generated_artifact_registry_update_rule_sync.py b/packages/google-cloud-artifact-registry/samples/generated_samples/artifactregistry_v1_generated_artifact_registry_update_rule_sync.py new file mode 100644 index 000000000000..cca94b54a9f5 --- /dev/null +++ b/packages/google-cloud-artifact-registry/samples/generated_samples/artifactregistry_v1_generated_artifact_registry_update_rule_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateRule +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-artifact-registry + + +# [START artifactregistry_v1_generated_ArtifactRegistry_UpdateRule_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import artifactregistry_v1 + + +def sample_update_rule(): + # Create a client + client = artifactregistry_v1.ArtifactRegistryClient() + + # Initialize request argument(s) + request = artifactregistry_v1.UpdateRuleRequest( + ) + + # Make the request + response = client.update_rule(request=request) + + # Handle the response + print(response) + +# [END artifactregistry_v1_generated_ArtifactRegistry_UpdateRule_sync] diff --git a/packages/google-cloud-artifact-registry/samples/generated_samples/artifactregistry_v1_generated_artifact_registry_update_version_async.py b/packages/google-cloud-artifact-registry/samples/generated_samples/artifactregistry_v1_generated_artifact_registry_update_version_async.py new file mode 100644 index 000000000000..8a9f92cbabbd --- /dev/null +++ b/packages/google-cloud-artifact-registry/samples/generated_samples/artifactregistry_v1_generated_artifact_registry_update_version_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateVersion +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-artifact-registry + + +# [START artifactregistry_v1_generated_ArtifactRegistry_UpdateVersion_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import artifactregistry_v1 + + +async def sample_update_version(): + # Create a client + client = artifactregistry_v1.ArtifactRegistryAsyncClient() + + # Initialize request argument(s) + request = artifactregistry_v1.UpdateVersionRequest( + ) + + # Make the request + response = await client.update_version(request=request) + + # Handle the response + print(response) + +# [END artifactregistry_v1_generated_ArtifactRegistry_UpdateVersion_async] diff --git a/packages/google-cloud-artifact-registry/samples/generated_samples/artifactregistry_v1_generated_artifact_registry_update_version_sync.py b/packages/google-cloud-artifact-registry/samples/generated_samples/artifactregistry_v1_generated_artifact_registry_update_version_sync.py new file mode 100644 index 000000000000..168eadd34b5f --- /dev/null +++ b/packages/google-cloud-artifact-registry/samples/generated_samples/artifactregistry_v1_generated_artifact_registry_update_version_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateVersion +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-artifact-registry + + +# [START artifactregistry_v1_generated_ArtifactRegistry_UpdateVersion_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import artifactregistry_v1 + + +def sample_update_version(): + # Create a client + client = artifactregistry_v1.ArtifactRegistryClient() + + # Initialize request argument(s) + request = artifactregistry_v1.UpdateVersionRequest( + ) + + # Make the request + response = client.update_version(request=request) + + # Handle the response + print(response) + +# [END artifactregistry_v1_generated_ArtifactRegistry_UpdateVersion_sync] diff --git a/packages/google-cloud-artifact-registry/samples/generated_samples/snippet_metadata_google.devtools.artifactregistry.v1.json b/packages/google-cloud-artifact-registry/samples/generated_samples/snippet_metadata_google.devtools.artifactregistry.v1.json index d0e149f3a0f0..5d9aa157563e 100644 --- a/packages/google-cloud-artifact-registry/samples/generated_samples/snippet_metadata_google.devtools.artifactregistry.v1.json +++ b/packages/google-cloud-artifact-registry/samples/generated_samples/snippet_metadata_google.devtools.artifactregistry.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-artifact-registry", - "version": "1.12.0" + "version": "1.13.1" }, "snippets": [ { @@ -188,30 +188,30 @@ "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient", "shortName": "ArtifactRegistryAsyncClient" }, - 
"fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient.create_repository", + "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient.create_attachment", "method": { - "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.CreateRepository", + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.CreateAttachment", "service": { "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry", "shortName": "ArtifactRegistry" }, - "shortName": "CreateRepository" + "shortName": "CreateAttachment" }, "parameters": [ { "name": "request", - "type": "google.cloud.artifactregistry_v1.types.CreateRepositoryRequest" + "type": "google.cloud.artifactregistry_v1.types.CreateAttachmentRequest" }, { "name": "parent", "type": "str" }, { - "name": "repository", - "type": "google.cloud.artifactregistry_v1.types.Repository" + "name": "attachment", + "type": "google.cloud.artifactregistry_v1.types.Attachment" }, { - "name": "repository_id", + "name": "attachment_id", "type": "str" }, { @@ -228,21 +228,21 @@ } ], "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "create_repository" + "shortName": "create_attachment" }, - "description": "Sample for CreateRepository", - "file": "artifactregistry_v1_generated_artifact_registry_create_repository_async.py", + "description": "Sample for CreateAttachment", + "file": "artifactregistry_v1_generated_artifact_registry_create_attachment_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_CreateRepository_async", + "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_CreateAttachment_async", "segments": [ { - "end": 56, + "end": 61, "start": 27, "type": "FULL" }, { - "end": 56, + "end": 61, "start": 27, "type": "SHORT" }, @@ -252,22 +252,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 51, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 53, - "start": 
47, + "end": 58, + "start": 52, "type": "REQUEST_EXECUTION" }, { - "end": 57, - "start": 54, + "end": 62, + "start": 59, "type": "RESPONSE_HANDLING" } ], - "title": "artifactregistry_v1_generated_artifact_registry_create_repository_async.py" + "title": "artifactregistry_v1_generated_artifact_registry_create_attachment_async.py" }, { "canonical": true, @@ -276,30 +276,30 @@ "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient", "shortName": "ArtifactRegistryClient" }, - "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient.create_repository", + "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient.create_attachment", "method": { - "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.CreateRepository", + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.CreateAttachment", "service": { "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry", "shortName": "ArtifactRegistry" }, - "shortName": "CreateRepository" + "shortName": "CreateAttachment" }, "parameters": [ { "name": "request", - "type": "google.cloud.artifactregistry_v1.types.CreateRepositoryRequest" + "type": "google.cloud.artifactregistry_v1.types.CreateAttachmentRequest" }, { "name": "parent", "type": "str" }, { - "name": "repository", - "type": "google.cloud.artifactregistry_v1.types.Repository" + "name": "attachment", + "type": "google.cloud.artifactregistry_v1.types.Attachment" }, { - "name": "repository_id", + "name": "attachment_id", "type": "str" }, { @@ -316,21 +316,21 @@ } ], "resultType": "google.api_core.operation.Operation", - "shortName": "create_repository" + "shortName": "create_attachment" }, - "description": "Sample for CreateRepository", - "file": "artifactregistry_v1_generated_artifact_registry_create_repository_sync.py", + "description": "Sample for CreateAttachment", + "file": "artifactregistry_v1_generated_artifact_registry_create_attachment_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - 
"regionTag": "artifactregistry_v1_generated_ArtifactRegistry_CreateRepository_sync", + "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_CreateAttachment_sync", "segments": [ { - "end": 56, + "end": 61, "start": 27, "type": "FULL" }, { - "end": 56, + "end": 61, "start": 27, "type": "SHORT" }, @@ -340,22 +340,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 51, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 53, - "start": 47, + "end": 58, + "start": 52, "type": "REQUEST_EXECUTION" }, { - "end": 57, - "start": 54, + "end": 62, + "start": 59, "type": "RESPONSE_HANDLING" } ], - "title": "artifactregistry_v1_generated_artifact_registry_create_repository_sync.py" + "title": "artifactregistry_v1_generated_artifact_registry_create_attachment_sync.py" }, { "canonical": true, @@ -365,30 +365,30 @@ "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient", "shortName": "ArtifactRegistryAsyncClient" }, - "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient.create_tag", + "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient.create_repository", "method": { - "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.CreateTag", + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.CreateRepository", "service": { "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry", "shortName": "ArtifactRegistry" }, - "shortName": "CreateTag" + "shortName": "CreateRepository" }, "parameters": [ { "name": "request", - "type": "google.cloud.artifactregistry_v1.types.CreateTagRequest" + "type": "google.cloud.artifactregistry_v1.types.CreateRepositoryRequest" }, { "name": "parent", "type": "str" }, { - "name": "tag", - "type": "google.cloud.artifactregistry_v1.types.Tag" + "name": "repository", + "type": "google.cloud.artifactregistry_v1.types.Repository" }, { - "name": "tag_id", + "name": "repository_id", "type": "str" }, { @@ -404,22 +404,22 @@ "type": 
"Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.artifactregistry_v1.types.Tag", - "shortName": "create_tag" + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_repository" }, - "description": "Sample for CreateTag", - "file": "artifactregistry_v1_generated_artifact_registry_create_tag_async.py", + "description": "Sample for CreateRepository", + "file": "artifactregistry_v1_generated_artifact_registry_create_repository_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_CreateTag_async", + "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_CreateRepository_async", "segments": [ { - "end": 50, + "end": 56, "start": 27, "type": "FULL" }, { - "end": 50, + "end": 56, "start": 27, "type": "SHORT" }, @@ -429,22 +429,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 44, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 47, - "start": 45, + "end": 53, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 51, - "start": 48, + "end": 57, + "start": 54, "type": "RESPONSE_HANDLING" } ], - "title": "artifactregistry_v1_generated_artifact_registry_create_tag_async.py" + "title": "artifactregistry_v1_generated_artifact_registry_create_repository_async.py" }, { "canonical": true, @@ -453,30 +453,30 @@ "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient", "shortName": "ArtifactRegistryClient" }, - "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient.create_tag", + "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient.create_repository", "method": { - "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.CreateTag", + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.CreateRepository", "service": { "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry", "shortName": "ArtifactRegistry" }, - "shortName": "CreateTag" + "shortName": 
"CreateRepository" }, "parameters": [ { "name": "request", - "type": "google.cloud.artifactregistry_v1.types.CreateTagRequest" + "type": "google.cloud.artifactregistry_v1.types.CreateRepositoryRequest" }, { "name": "parent", "type": "str" }, { - "name": "tag", - "type": "google.cloud.artifactregistry_v1.types.Tag" + "name": "repository", + "type": "google.cloud.artifactregistry_v1.types.Repository" }, { - "name": "tag_id", + "name": "repository_id", "type": "str" }, { @@ -492,22 +492,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.artifactregistry_v1.types.Tag", - "shortName": "create_tag" + "resultType": "google.api_core.operation.Operation", + "shortName": "create_repository" }, - "description": "Sample for CreateTag", - "file": "artifactregistry_v1_generated_artifact_registry_create_tag_sync.py", + "description": "Sample for CreateRepository", + "file": "artifactregistry_v1_generated_artifact_registry_create_repository_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_CreateTag_sync", + "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_CreateRepository_sync", "segments": [ { - "end": 50, + "end": 56, "start": 27, "type": "FULL" }, { - "end": 50, + "end": 56, "start": 27, "type": "SHORT" }, @@ -517,22 +517,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 44, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 47, - "start": 45, + "end": 53, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 51, - "start": 48, + "end": 57, + "start": 54, "type": "RESPONSE_HANDLING" } ], - "title": "artifactregistry_v1_generated_artifact_registry_create_tag_sync.py" + "title": "artifactregistry_v1_generated_artifact_registry_create_repository_sync.py" }, { "canonical": true, @@ -542,22 +542,30 @@ "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient", "shortName": "ArtifactRegistryAsyncClient" }, - "fullName": 
"google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient.delete_package", + "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient.create_rule", "method": { - "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.DeletePackage", + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.CreateRule", "service": { "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry", "shortName": "ArtifactRegistry" }, - "shortName": "DeletePackage" + "shortName": "CreateRule" }, "parameters": [ { "name": "request", - "type": "google.cloud.artifactregistry_v1.types.DeletePackageRequest" + "type": "google.cloud.artifactregistry_v1.types.CreateRuleRequest" }, { - "name": "name", + "name": "parent", + "type": "str" + }, + { + "name": "rule", + "type": "google.cloud.artifactregistry_v1.types.Rule" + }, + { + "name": "rule_id", "type": "str" }, { @@ -573,22 +581,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "delete_package" + "resultType": "google.cloud.artifactregistry_v1.types.Rule", + "shortName": "create_rule" }, - "description": "Sample for DeletePackage", - "file": "artifactregistry_v1_generated_artifact_registry_delete_package_async.py", + "description": "Sample for CreateRule", + "file": "artifactregistry_v1_generated_artifact_registry_create_rule_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_DeletePackage_async", + "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_CreateRule_async", "segments": [ { - "end": 55, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 51, "start": 27, "type": "SHORT" }, @@ -603,17 +611,17 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 52, + "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 53, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": 
"artifactregistry_v1_generated_artifact_registry_delete_package_async.py" + "title": "artifactregistry_v1_generated_artifact_registry_create_rule_async.py" }, { "canonical": true, @@ -622,22 +630,30 @@ "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient", "shortName": "ArtifactRegistryClient" }, - "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient.delete_package", + "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient.create_rule", "method": { - "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.DeletePackage", + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.CreateRule", "service": { "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry", "shortName": "ArtifactRegistry" }, - "shortName": "DeletePackage" + "shortName": "CreateRule" }, "parameters": [ { "name": "request", - "type": "google.cloud.artifactregistry_v1.types.DeletePackageRequest" + "type": "google.cloud.artifactregistry_v1.types.CreateRuleRequest" }, { - "name": "name", + "name": "parent", + "type": "str" + }, + { + "name": "rule", + "type": "google.cloud.artifactregistry_v1.types.Rule" + }, + { + "name": "rule_id", "type": "str" }, { @@ -653,22 +669,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.operation.Operation", - "shortName": "delete_package" + "resultType": "google.cloud.artifactregistry_v1.types.Rule", + "shortName": "create_rule" }, - "description": "Sample for DeletePackage", - "file": "artifactregistry_v1_generated_artifact_registry_delete_package_sync.py", + "description": "Sample for CreateRule", + "file": "artifactregistry_v1_generated_artifact_registry_create_rule_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_DeletePackage_sync", + "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_CreateRule_sync", "segments": [ { - "end": 55, + "end": 51, "start": 27, "type": "FULL" }, { - 
"end": 55, + "end": 51, "start": 27, "type": "SHORT" }, @@ -683,17 +699,17 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 52, + "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 53, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "artifactregistry_v1_generated_artifact_registry_delete_package_sync.py" + "title": "artifactregistry_v1_generated_artifact_registry_create_rule_sync.py" }, { "canonical": true, @@ -703,22 +719,30 @@ "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient", "shortName": "ArtifactRegistryAsyncClient" }, - "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient.delete_repository", + "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient.create_tag", "method": { - "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.DeleteRepository", + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.CreateTag", "service": { "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry", "shortName": "ArtifactRegistry" }, - "shortName": "DeleteRepository" + "shortName": "CreateTag" }, "parameters": [ { "name": "request", - "type": "google.cloud.artifactregistry_v1.types.DeleteRepositoryRequest" + "type": "google.cloud.artifactregistry_v1.types.CreateTagRequest" }, { - "name": "name", + "name": "parent", + "type": "str" + }, + { + "name": "tag", + "type": "google.cloud.artifactregistry_v1.types.Tag" + }, + { + "name": "tag_id", "type": "str" }, { @@ -734,22 +758,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "delete_repository" + "resultType": "google.cloud.artifactregistry_v1.types.Tag", + "shortName": "create_tag" }, - "description": "Sample for DeleteRepository", - "file": "artifactregistry_v1_generated_artifact_registry_delete_repository_async.py", + "description": "Sample for CreateTag", + "file": 
"artifactregistry_v1_generated_artifact_registry_create_tag_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_DeleteRepository_async", + "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_CreateTag_async", "segments": [ { - "end": 55, + "end": 50, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 50, "start": 27, "type": "SHORT" }, @@ -759,22 +783,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 44, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 52, - "start": 46, + "end": 47, + "start": 45, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 53, + "end": 51, + "start": 48, "type": "RESPONSE_HANDLING" } ], - "title": "artifactregistry_v1_generated_artifact_registry_delete_repository_async.py" + "title": "artifactregistry_v1_generated_artifact_registry_create_tag_async.py" }, { "canonical": true, @@ -783,22 +807,30 @@ "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient", "shortName": "ArtifactRegistryClient" }, - "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient.delete_repository", + "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient.create_tag", "method": { - "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.DeleteRepository", + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.CreateTag", "service": { "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry", "shortName": "ArtifactRegistry" }, - "shortName": "DeleteRepository" + "shortName": "CreateTag" }, "parameters": [ { "name": "request", - "type": "google.cloud.artifactregistry_v1.types.DeleteRepositoryRequest" + "type": "google.cloud.artifactregistry_v1.types.CreateTagRequest" }, { - "name": "name", + "name": "parent", + "type": "str" + }, + { + "name": "tag", + "type": "google.cloud.artifactregistry_v1.types.Tag" + }, + { + "name": "tag_id", "type": "str" }, { @@ -814,22 +846,22 @@ "type": 
"Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.operation.Operation", - "shortName": "delete_repository" + "resultType": "google.cloud.artifactregistry_v1.types.Tag", + "shortName": "create_tag" }, - "description": "Sample for DeleteRepository", - "file": "artifactregistry_v1_generated_artifact_registry_delete_repository_sync.py", + "description": "Sample for CreateTag", + "file": "artifactregistry_v1_generated_artifact_registry_create_tag_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_DeleteRepository_sync", + "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_CreateTag_sync", "segments": [ { - "end": 55, + "end": 50, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 50, "start": 27, "type": "SHORT" }, @@ -839,22 +871,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 44, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 52, - "start": 46, + "end": 47, + "start": 45, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 53, + "end": 51, + "start": 48, "type": "RESPONSE_HANDLING" } ], - "title": "artifactregistry_v1_generated_artifact_registry_delete_repository_sync.py" + "title": "artifactregistry_v1_generated_artifact_registry_create_tag_sync.py" }, { "canonical": true, @@ -864,19 +896,19 @@ "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient", "shortName": "ArtifactRegistryAsyncClient" }, - "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient.delete_tag", + "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient.delete_attachment", "method": { - "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.DeleteTag", + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.DeleteAttachment", "service": { "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry", "shortName": "ArtifactRegistry" }, - "shortName": "DeleteTag" + "shortName": 
"DeleteAttachment" }, "parameters": [ { "name": "request", - "type": "google.cloud.artifactregistry_v1.types.DeleteTagRequest" + "type": "google.cloud.artifactregistry_v1.types.DeleteAttachmentRequest" }, { "name": "name", @@ -895,21 +927,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "shortName": "delete_tag" + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_attachment" }, - "description": "Sample for DeleteTag", - "file": "artifactregistry_v1_generated_artifact_registry_delete_tag_async.py", + "description": "Sample for DeleteAttachment", + "file": "artifactregistry_v1_generated_artifact_registry_delete_attachment_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_DeleteTag_async", + "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_DeleteAttachment_async", "segments": [ { - "end": 48, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 48, + "end": 55, "start": 27, "type": "SHORT" }, @@ -919,20 +952,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 44, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "start": 45, + "end": 52, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 49, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "artifactregistry_v1_generated_artifact_registry_delete_tag_async.py" + "title": "artifactregistry_v1_generated_artifact_registry_delete_attachment_async.py" }, { "canonical": true, @@ -941,19 +976,19 @@ "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient", "shortName": "ArtifactRegistryClient" }, - "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient.delete_tag", + "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient.delete_attachment", "method": { - "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.DeleteTag", + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.DeleteAttachment", 
"service": { "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry", "shortName": "ArtifactRegistry" }, - "shortName": "DeleteTag" + "shortName": "DeleteAttachment" }, "parameters": [ { "name": "request", - "type": "google.cloud.artifactregistry_v1.types.DeleteTagRequest" + "type": "google.cloud.artifactregistry_v1.types.DeleteAttachmentRequest" }, { "name": "name", @@ -972,21 +1007,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "shortName": "delete_tag" + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_attachment" }, - "description": "Sample for DeleteTag", - "file": "artifactregistry_v1_generated_artifact_registry_delete_tag_sync.py", + "description": "Sample for DeleteAttachment", + "file": "artifactregistry_v1_generated_artifact_registry_delete_attachment_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_DeleteTag_sync", + "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_DeleteAttachment_sync", "segments": [ { - "end": 48, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 48, + "end": 55, "start": 27, "type": "SHORT" }, @@ -996,20 +1032,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 44, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "start": 45, + "end": 52, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 49, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "artifactregistry_v1_generated_artifact_registry_delete_tag_sync.py" + "title": "artifactregistry_v1_generated_artifact_registry_delete_attachment_sync.py" }, { "canonical": true, @@ -1019,19 +1057,19 @@ "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient", "shortName": "ArtifactRegistryAsyncClient" }, - "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient.delete_version", + "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient.delete_file", "method": { - 
"fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.DeleteVersion", + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.DeleteFile", "service": { "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry", "shortName": "ArtifactRegistry" }, - "shortName": "DeleteVersion" + "shortName": "DeleteFile" }, "parameters": [ { "name": "request", - "type": "google.cloud.artifactregistry_v1.types.DeleteVersionRequest" + "type": "google.cloud.artifactregistry_v1.types.DeleteFileRequest" }, { "name": "name", @@ -1051,21 +1089,21 @@ } ], "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "delete_version" + "shortName": "delete_file" }, - "description": "Sample for DeleteVersion", - "file": "artifactregistry_v1_generated_artifact_registry_delete_version_async.py", + "description": "Sample for DeleteFile", + "file": "artifactregistry_v1_generated_artifact_registry_delete_file_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_DeleteVersion_async", + "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_DeleteFile_async", "segments": [ { - "end": 54, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 54, + "end": 55, "start": 27, "type": "SHORT" }, @@ -1075,22 +1113,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 44, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 51, - "start": 45, + "end": 52, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 55, - "start": 52, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "artifactregistry_v1_generated_artifact_registry_delete_version_async.py" + "title": "artifactregistry_v1_generated_artifact_registry_delete_file_async.py" }, { "canonical": true, @@ -1099,19 +1137,19 @@ "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient", "shortName": "ArtifactRegistryClient" }, - "fullName": 
"google.cloud.artifactregistry_v1.ArtifactRegistryClient.delete_version", + "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient.delete_file", "method": { - "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.DeleteVersion", + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.DeleteFile", "service": { "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry", "shortName": "ArtifactRegistry" }, - "shortName": "DeleteVersion" + "shortName": "DeleteFile" }, "parameters": [ { "name": "request", - "type": "google.cloud.artifactregistry_v1.types.DeleteVersionRequest" + "type": "google.cloud.artifactregistry_v1.types.DeleteFileRequest" }, { "name": "name", @@ -1131,21 +1169,21 @@ } ], "resultType": "google.api_core.operation.Operation", - "shortName": "delete_version" + "shortName": "delete_file" }, - "description": "Sample for DeleteVersion", - "file": "artifactregistry_v1_generated_artifact_registry_delete_version_sync.py", + "description": "Sample for DeleteFile", + "file": "artifactregistry_v1_generated_artifact_registry_delete_file_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_DeleteVersion_sync", + "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_DeleteFile_sync", "segments": [ { - "end": 54, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 54, + "end": 55, "start": 27, "type": "SHORT" }, @@ -1155,22 +1193,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 44, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 51, - "start": 45, + "end": 52, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 55, - "start": 52, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "artifactregistry_v1_generated_artifact_registry_delete_version_sync.py" + "title": "artifactregistry_v1_generated_artifact_registry_delete_file_sync.py" }, { "canonical": true, @@ -1180,19 +1218,19 @@ "fullName": 
"google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient", "shortName": "ArtifactRegistryAsyncClient" }, - "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient.get_docker_image", + "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient.delete_package", "method": { - "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.GetDockerImage", + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.DeletePackage", "service": { "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry", "shortName": "ArtifactRegistry" }, - "shortName": "GetDockerImage" + "shortName": "DeletePackage" }, "parameters": [ { "name": "request", - "type": "google.cloud.artifactregistry_v1.types.GetDockerImageRequest" + "type": "google.cloud.artifactregistry_v1.types.DeletePackageRequest" }, { "name": "name", @@ -1211,22 +1249,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.artifactregistry_v1.types.DockerImage", - "shortName": "get_docker_image" + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_package" }, - "description": "Sample for GetDockerImage", - "file": "artifactregistry_v1_generated_artifact_registry_get_docker_image_async.py", + "description": "Sample for DeletePackage", + "file": "artifactregistry_v1_generated_artifact_registry_delete_package_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_GetDockerImage_async", + "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_DeletePackage_async", "segments": [ { - "end": 51, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 55, "start": 27, "type": "SHORT" }, @@ -1241,17 +1279,17 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 48, + "end": 52, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": 
"artifactregistry_v1_generated_artifact_registry_get_docker_image_async.py" + "title": "artifactregistry_v1_generated_artifact_registry_delete_package_async.py" }, { "canonical": true, @@ -1260,19 +1298,19 @@ "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient", "shortName": "ArtifactRegistryClient" }, - "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient.get_docker_image", + "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient.delete_package", "method": { - "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.GetDockerImage", + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.DeletePackage", "service": { "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry", "shortName": "ArtifactRegistry" }, - "shortName": "GetDockerImage" + "shortName": "DeletePackage" }, "parameters": [ { "name": "request", - "type": "google.cloud.artifactregistry_v1.types.GetDockerImageRequest" + "type": "google.cloud.artifactregistry_v1.types.DeletePackageRequest" }, { "name": "name", @@ -1291,22 +1329,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.artifactregistry_v1.types.DockerImage", - "shortName": "get_docker_image" + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_package" }, - "description": "Sample for GetDockerImage", - "file": "artifactregistry_v1_generated_artifact_registry_get_docker_image_sync.py", + "description": "Sample for DeletePackage", + "file": "artifactregistry_v1_generated_artifact_registry_delete_package_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_GetDockerImage_sync", + "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_DeletePackage_sync", "segments": [ { - "end": 51, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 55, "start": 27, "type": "SHORT" }, @@ -1321,17 +1359,17 @@ "type": "REQUEST_INITIALIZATION" }, { - 
"end": 48, + "end": 52, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "artifactregistry_v1_generated_artifact_registry_get_docker_image_sync.py" + "title": "artifactregistry_v1_generated_artifact_registry_delete_package_sync.py" }, { "canonical": true, @@ -1341,19 +1379,19 @@ "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient", "shortName": "ArtifactRegistryAsyncClient" }, - "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient.get_file", + "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient.delete_repository", "method": { - "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.GetFile", + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.DeleteRepository", "service": { "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry", "shortName": "ArtifactRegistry" }, - "shortName": "GetFile" + "shortName": "DeleteRepository" }, "parameters": [ { "name": "request", - "type": "google.cloud.artifactregistry_v1.types.GetFileRequest" + "type": "google.cloud.artifactregistry_v1.types.DeleteRepositoryRequest" }, { "name": "name", @@ -1372,22 +1410,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.artifactregistry_v1.types.File", - "shortName": "get_file" + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_repository" }, - "description": "Sample for GetFile", - "file": "artifactregistry_v1_generated_artifact_registry_get_file_async.py", + "description": "Sample for DeleteRepository", + "file": "artifactregistry_v1_generated_artifact_registry_delete_repository_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_GetFile_async", + "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_DeleteRepository_async", "segments": [ { - "end": 51, + 
"end": 55, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 55, "start": 27, "type": "SHORT" }, @@ -1402,17 +1440,17 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 48, + "end": 52, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "artifactregistry_v1_generated_artifact_registry_get_file_async.py" + "title": "artifactregistry_v1_generated_artifact_registry_delete_repository_async.py" }, { "canonical": true, @@ -1421,19 +1459,19 @@ "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient", "shortName": "ArtifactRegistryClient" }, - "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient.get_file", + "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient.delete_repository", "method": { - "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.GetFile", + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.DeleteRepository", "service": { "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry", "shortName": "ArtifactRegistry" }, - "shortName": "GetFile" + "shortName": "DeleteRepository" }, "parameters": [ { "name": "request", - "type": "google.cloud.artifactregistry_v1.types.GetFileRequest" + "type": "google.cloud.artifactregistry_v1.types.DeleteRepositoryRequest" }, { "name": "name", @@ -1452,22 +1490,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.artifactregistry_v1.types.File", - "shortName": "get_file" + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_repository" }, - "description": "Sample for GetFile", - "file": "artifactregistry_v1_generated_artifact_registry_get_file_sync.py", + "description": "Sample for DeleteRepository", + "file": "artifactregistry_v1_generated_artifact_registry_delete_repository_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": 
"artifactregistry_v1_generated_ArtifactRegistry_GetFile_sync", + "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_DeleteRepository_sync", "segments": [ { - "end": 51, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 55, "start": 27, "type": "SHORT" }, @@ -1482,17 +1520,17 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 48, + "end": 52, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "artifactregistry_v1_generated_artifact_registry_get_file_sync.py" + "title": "artifactregistry_v1_generated_artifact_registry_delete_repository_sync.py" }, { "canonical": true, @@ -1502,19 +1540,23 @@ "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient", "shortName": "ArtifactRegistryAsyncClient" }, - "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient.get_iam_policy", + "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient.delete_rule", "method": { - "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.GetIamPolicy", + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.DeleteRule", "service": { "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry", "shortName": "ArtifactRegistry" }, - "shortName": "GetIamPolicy" + "shortName": "DeleteRule" }, "parameters": [ { "name": "request", - "type": "google.iam.v1.iam_policy_pb2.GetIamPolicyRequest" + "type": "google.cloud.artifactregistry_v1.types.DeleteRuleRequest" + }, + { + "name": "name", + "type": "str" }, { "name": "retry", @@ -1529,47 +1571,44 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.iam.v1.policy_pb2.Policy", - "shortName": "get_iam_policy" + "shortName": "delete_rule" }, - "description": "Sample for GetIamPolicy", - "file": "artifactregistry_v1_generated_artifact_registry_get_iam_policy_async.py", + "description": "Sample for DeleteRule", + "file": 
"artifactregistry_v1_generated_artifact_registry_delete_rule_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_GetIamPolicy_async", + "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_DeleteRule_async", "segments": [ { - "end": 52, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 49, "start": 27, "type": "SHORT" }, { - "end": 41, - "start": 39, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 46, - "start": 42, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 47, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 50, + "end": 50, "type": "RESPONSE_HANDLING" } ], - "title": "artifactregistry_v1_generated_artifact_registry_get_iam_policy_async.py" + "title": "artifactregistry_v1_generated_artifact_registry_delete_rule_async.py" }, { "canonical": true, @@ -1578,23 +1617,27 @@ "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient", "shortName": "ArtifactRegistryClient" }, - "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient.get_iam_policy", + "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient.delete_rule", "method": { - "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.GetIamPolicy", + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.DeleteRule", "service": { "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry", "shortName": "ArtifactRegistry" }, - "shortName": "GetIamPolicy" + "shortName": "DeleteRule" }, "parameters": [ { "name": "request", - "type": "google.iam.v1.iam_policy_pb2.GetIamPolicyRequest" + "type": "google.cloud.artifactregistry_v1.types.DeleteRuleRequest" }, { - "name": "retry", - "type": "google.api_core.retry.Retry" + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" }, { "name": "timeout", @@ -1605,47 +1648,44 @@ 
"type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.iam.v1.policy_pb2.Policy", - "shortName": "get_iam_policy" + "shortName": "delete_rule" }, - "description": "Sample for GetIamPolicy", - "file": "artifactregistry_v1_generated_artifact_registry_get_iam_policy_sync.py", + "description": "Sample for DeleteRule", + "file": "artifactregistry_v1_generated_artifact_registry_delete_rule_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_GetIamPolicy_sync", + "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_DeleteRule_sync", "segments": [ { - "end": 52, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 49, "start": 27, "type": "SHORT" }, { - "end": 41, - "start": 39, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 46, - "start": 42, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 47, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 50, + "end": 50, "type": "RESPONSE_HANDLING" } ], - "title": "artifactregistry_v1_generated_artifact_registry_get_iam_policy_sync.py" + "title": "artifactregistry_v1_generated_artifact_registry_delete_rule_sync.py" }, { "canonical": true, @@ -1655,19 +1695,19 @@ "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient", "shortName": "ArtifactRegistryAsyncClient" }, - "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient.get_maven_artifact", + "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient.delete_tag", "method": { - "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.GetMavenArtifact", + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.DeleteTag", "service": { "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry", "shortName": "ArtifactRegistry" }, - "shortName": "GetMavenArtifact" + "shortName": "DeleteTag" }, "parameters": [ { "name": 
"request", - "type": "google.cloud.artifactregistry_v1.types.GetMavenArtifactRequest" + "type": "google.cloud.artifactregistry_v1.types.DeleteTagRequest" }, { "name": "name", @@ -1686,22 +1726,21 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.artifactregistry_v1.types.MavenArtifact", - "shortName": "get_maven_artifact" + "shortName": "delete_tag" }, - "description": "Sample for GetMavenArtifact", - "file": "artifactregistry_v1_generated_artifact_registry_get_maven_artifact_async.py", + "description": "Sample for DeleteTag", + "file": "artifactregistry_v1_generated_artifact_registry_delete_tag_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_GetMavenArtifact_async", + "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_DeleteTag_async", "segments": [ { - "end": 51, + "end": 48, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 48, "start": 27, "type": "SHORT" }, @@ -1711,22 +1750,20 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 44, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "start": 45, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 49, "type": "RESPONSE_HANDLING" } ], - "title": "artifactregistry_v1_generated_artifact_registry_get_maven_artifact_async.py" + "title": "artifactregistry_v1_generated_artifact_registry_delete_tag_async.py" }, { "canonical": true, @@ -1735,19 +1772,19 @@ "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient", "shortName": "ArtifactRegistryClient" }, - "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient.get_maven_artifact", + "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient.delete_tag", "method": { - "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.GetMavenArtifact", + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.DeleteTag", "service": { "fullName": 
"google.devtools.artifactregistry.v1.ArtifactRegistry", "shortName": "ArtifactRegistry" }, - "shortName": "GetMavenArtifact" + "shortName": "DeleteTag" }, "parameters": [ { "name": "request", - "type": "google.cloud.artifactregistry_v1.types.GetMavenArtifactRequest" + "type": "google.cloud.artifactregistry_v1.types.DeleteTagRequest" }, { "name": "name", @@ -1766,22 +1803,21 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.artifactregistry_v1.types.MavenArtifact", - "shortName": "get_maven_artifact" + "shortName": "delete_tag" }, - "description": "Sample for GetMavenArtifact", - "file": "artifactregistry_v1_generated_artifact_registry_get_maven_artifact_sync.py", + "description": "Sample for DeleteTag", + "file": "artifactregistry_v1_generated_artifact_registry_delete_tag_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_GetMavenArtifact_sync", + "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_DeleteTag_sync", "segments": [ { - "end": 51, + "end": 48, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 48, "start": 27, "type": "SHORT" }, @@ -1791,22 +1827,20 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 44, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "start": 45, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 49, "type": "RESPONSE_HANDLING" } ], - "title": "artifactregistry_v1_generated_artifact_registry_get_maven_artifact_sync.py" + "title": "artifactregistry_v1_generated_artifact_registry_delete_tag_sync.py" }, { "canonical": true, @@ -1816,19 +1850,19 @@ "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient", "shortName": "ArtifactRegistryAsyncClient" }, - "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient.get_npm_package", + "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient.delete_version", "method": { - 
"fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.GetNpmPackage", + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.DeleteVersion", "service": { "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry", "shortName": "ArtifactRegistry" }, - "shortName": "GetNpmPackage" + "shortName": "DeleteVersion" }, "parameters": [ { "name": "request", - "type": "google.cloud.artifactregistry_v1.types.GetNpmPackageRequest" + "type": "google.cloud.artifactregistry_v1.types.DeleteVersionRequest" }, { "name": "name", @@ -1847,22 +1881,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.artifactregistry_v1.types.NpmPackage", - "shortName": "get_npm_package" + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_version" }, - "description": "Sample for GetNpmPackage", - "file": "artifactregistry_v1_generated_artifact_registry_get_npm_package_async.py", + "description": "Sample for DeleteVersion", + "file": "artifactregistry_v1_generated_artifact_registry_delete_version_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_GetNpmPackage_async", + "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_DeleteVersion_async", "segments": [ { - "end": 51, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 54, "start": 27, "type": "SHORT" }, @@ -1872,22 +1906,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 44, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 51, + "start": 45, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], - "title": "artifactregistry_v1_generated_artifact_registry_get_npm_package_async.py" + "title": "artifactregistry_v1_generated_artifact_registry_delete_version_async.py" }, { "canonical": true, @@ -1896,19 +1930,19 @@ "fullName": 
"google.cloud.artifactregistry_v1.ArtifactRegistryClient", "shortName": "ArtifactRegistryClient" }, - "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient.get_npm_package", + "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient.delete_version", "method": { - "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.GetNpmPackage", + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.DeleteVersion", "service": { "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry", "shortName": "ArtifactRegistry" }, - "shortName": "GetNpmPackage" + "shortName": "DeleteVersion" }, "parameters": [ { "name": "request", - "type": "google.cloud.artifactregistry_v1.types.GetNpmPackageRequest" + "type": "google.cloud.artifactregistry_v1.types.DeleteVersionRequest" }, { "name": "name", @@ -1927,22 +1961,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.artifactregistry_v1.types.NpmPackage", - "shortName": "get_npm_package" + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_version" }, - "description": "Sample for GetNpmPackage", - "file": "artifactregistry_v1_generated_artifact_registry_get_npm_package_sync.py", + "description": "Sample for DeleteVersion", + "file": "artifactregistry_v1_generated_artifact_registry_delete_version_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_GetNpmPackage_sync", + "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_DeleteVersion_sync", "segments": [ { - "end": 51, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 54, "start": 27, "type": "SHORT" }, @@ -1952,22 +1986,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 44, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 51, + "start": 45, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 55, + "start": 52, "type": 
"RESPONSE_HANDLING" } ], - "title": "artifactregistry_v1_generated_artifact_registry_get_npm_package_sync.py" + "title": "artifactregistry_v1_generated_artifact_registry_delete_version_sync.py" }, { "canonical": true, @@ -1977,19 +2011,19 @@ "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient", "shortName": "ArtifactRegistryAsyncClient" }, - "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient.get_package", + "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient.get_attachment", "method": { - "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.GetPackage", + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.GetAttachment", "service": { "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry", "shortName": "ArtifactRegistry" }, - "shortName": "GetPackage" + "shortName": "GetAttachment" }, "parameters": [ { "name": "request", - "type": "google.cloud.artifactregistry_v1.types.GetPackageRequest" + "type": "google.cloud.artifactregistry_v1.types.GetAttachmentRequest" }, { "name": "name", @@ -2008,14 +2042,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.artifactregistry_v1.types.Package", - "shortName": "get_package" + "resultType": "google.cloud.artifactregistry_v1.types.Attachment", + "shortName": "get_attachment" }, - "description": "Sample for GetPackage", - "file": "artifactregistry_v1_generated_artifact_registry_get_package_async.py", + "description": "Sample for GetAttachment", + "file": "artifactregistry_v1_generated_artifact_registry_get_attachment_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_GetPackage_async", + "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_GetAttachment_async", "segments": [ { "end": 51, @@ -2048,7 +2082,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "artifactregistry_v1_generated_artifact_registry_get_package_async.py" 
+ "title": "artifactregistry_v1_generated_artifact_registry_get_attachment_async.py" }, { "canonical": true, @@ -2057,19 +2091,19 @@ "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient", "shortName": "ArtifactRegistryClient" }, - "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient.get_package", + "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient.get_attachment", "method": { - "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.GetPackage", + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.GetAttachment", "service": { "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry", "shortName": "ArtifactRegistry" }, - "shortName": "GetPackage" + "shortName": "GetAttachment" }, "parameters": [ { "name": "request", - "type": "google.cloud.artifactregistry_v1.types.GetPackageRequest" + "type": "google.cloud.artifactregistry_v1.types.GetAttachmentRequest" }, { "name": "name", @@ -2088,14 +2122,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.artifactregistry_v1.types.Package", - "shortName": "get_package" + "resultType": "google.cloud.artifactregistry_v1.types.Attachment", + "shortName": "get_attachment" }, - "description": "Sample for GetPackage", - "file": "artifactregistry_v1_generated_artifact_registry_get_package_sync.py", + "description": "Sample for GetAttachment", + "file": "artifactregistry_v1_generated_artifact_registry_get_attachment_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_GetPackage_sync", + "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_GetAttachment_sync", "segments": [ { "end": 51, @@ -2128,7 +2162,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "artifactregistry_v1_generated_artifact_registry_get_package_sync.py" + "title": "artifactregistry_v1_generated_artifact_registry_get_attachment_sync.py" }, { "canonical": true, @@ -2138,19 +2172,19 @@ 
"fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient", "shortName": "ArtifactRegistryAsyncClient" }, - "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient.get_project_settings", + "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient.get_docker_image", "method": { - "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.GetProjectSettings", + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.GetDockerImage", "service": { "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry", "shortName": "ArtifactRegistry" }, - "shortName": "GetProjectSettings" + "shortName": "GetDockerImage" }, "parameters": [ { "name": "request", - "type": "google.cloud.artifactregistry_v1.types.GetProjectSettingsRequest" + "type": "google.cloud.artifactregistry_v1.types.GetDockerImageRequest" }, { "name": "name", @@ -2169,14 +2203,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.artifactregistry_v1.types.ProjectSettings", - "shortName": "get_project_settings" + "resultType": "google.cloud.artifactregistry_v1.types.DockerImage", + "shortName": "get_docker_image" }, - "description": "Sample for GetProjectSettings", - "file": "artifactregistry_v1_generated_artifact_registry_get_project_settings_async.py", + "description": "Sample for GetDockerImage", + "file": "artifactregistry_v1_generated_artifact_registry_get_docker_image_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_GetProjectSettings_async", + "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_GetDockerImage_async", "segments": [ { "end": 51, @@ -2209,7 +2243,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "artifactregistry_v1_generated_artifact_registry_get_project_settings_async.py" + "title": "artifactregistry_v1_generated_artifact_registry_get_docker_image_async.py" }, { "canonical": true, @@ -2218,19 +2252,19 @@ "fullName": 
"google.cloud.artifactregistry_v1.ArtifactRegistryClient", "shortName": "ArtifactRegistryClient" }, - "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient.get_project_settings", + "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient.get_docker_image", "method": { - "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.GetProjectSettings", + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.GetDockerImage", "service": { "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry", "shortName": "ArtifactRegistry" }, - "shortName": "GetProjectSettings" + "shortName": "GetDockerImage" }, "parameters": [ { "name": "request", - "type": "google.cloud.artifactregistry_v1.types.GetProjectSettingsRequest" + "type": "google.cloud.artifactregistry_v1.types.GetDockerImageRequest" }, { "name": "name", @@ -2249,14 +2283,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.artifactregistry_v1.types.ProjectSettings", - "shortName": "get_project_settings" + "resultType": "google.cloud.artifactregistry_v1.types.DockerImage", + "shortName": "get_docker_image" }, - "description": "Sample for GetProjectSettings", - "file": "artifactregistry_v1_generated_artifact_registry_get_project_settings_sync.py", + "description": "Sample for GetDockerImage", + "file": "artifactregistry_v1_generated_artifact_registry_get_docker_image_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_GetProjectSettings_sync", + "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_GetDockerImage_sync", "segments": [ { "end": 51, @@ -2289,7 +2323,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "artifactregistry_v1_generated_artifact_registry_get_project_settings_sync.py" + "title": "artifactregistry_v1_generated_artifact_registry_get_docker_image_sync.py" }, { "canonical": true, @@ -2299,19 +2333,19 @@ "fullName": 
"google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient", "shortName": "ArtifactRegistryAsyncClient" }, - "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient.get_python_package", + "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient.get_file", "method": { - "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.GetPythonPackage", + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.GetFile", "service": { "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry", "shortName": "ArtifactRegistry" }, - "shortName": "GetPythonPackage" + "shortName": "GetFile" }, "parameters": [ { "name": "request", - "type": "google.cloud.artifactregistry_v1.types.GetPythonPackageRequest" + "type": "google.cloud.artifactregistry_v1.types.GetFileRequest" }, { "name": "name", @@ -2330,14 +2364,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.artifactregistry_v1.types.PythonPackage", - "shortName": "get_python_package" + "resultType": "google.cloud.artifactregistry_v1.types.File", + "shortName": "get_file" }, - "description": "Sample for GetPythonPackage", - "file": "artifactregistry_v1_generated_artifact_registry_get_python_package_async.py", + "description": "Sample for GetFile", + "file": "artifactregistry_v1_generated_artifact_registry_get_file_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_GetPythonPackage_async", + "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_GetFile_async", "segments": [ { "end": 51, @@ -2370,7 +2404,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "artifactregistry_v1_generated_artifact_registry_get_python_package_async.py" + "title": "artifactregistry_v1_generated_artifact_registry_get_file_async.py" }, { "canonical": true, @@ -2379,19 +2413,19 @@ "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient", "shortName": "ArtifactRegistryClient" }, - 
"fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient.get_python_package", + "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient.get_file", "method": { - "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.GetPythonPackage", + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.GetFile", "service": { "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry", "shortName": "ArtifactRegistry" }, - "shortName": "GetPythonPackage" + "shortName": "GetFile" }, "parameters": [ { "name": "request", - "type": "google.cloud.artifactregistry_v1.types.GetPythonPackageRequest" + "type": "google.cloud.artifactregistry_v1.types.GetFileRequest" }, { "name": "name", @@ -2410,14 +2444,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.artifactregistry_v1.types.PythonPackage", - "shortName": "get_python_package" + "resultType": "google.cloud.artifactregistry_v1.types.File", + "shortName": "get_file" }, - "description": "Sample for GetPythonPackage", - "file": "artifactregistry_v1_generated_artifact_registry_get_python_package_sync.py", + "description": "Sample for GetFile", + "file": "artifactregistry_v1_generated_artifact_registry_get_file_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_GetPythonPackage_sync", + "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_GetFile_sync", "segments": [ { "end": 51, @@ -2450,7 +2484,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "artifactregistry_v1_generated_artifact_registry_get_python_package_sync.py" + "title": "artifactregistry_v1_generated_artifact_registry_get_file_sync.py" }, { "canonical": true, @@ -2460,23 +2494,19 @@ "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient", "shortName": "ArtifactRegistryAsyncClient" }, - "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient.get_repository", + "fullName": 
"google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient.get_iam_policy", "method": { - "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.GetRepository", + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.GetIamPolicy", "service": { "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry", "shortName": "ArtifactRegistry" }, - "shortName": "GetRepository" + "shortName": "GetIamPolicy" }, "parameters": [ { "name": "request", - "type": "google.cloud.artifactregistry_v1.types.GetRepositoryRequest" - }, - { - "name": "name", - "type": "str" + "type": "google.iam.v1.iam_policy_pb2.GetIamPolicyRequest" }, { "name": "retry", @@ -2491,47 +2521,47 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.artifactregistry_v1.types.Repository", - "shortName": "get_repository" + "resultType": "google.iam.v1.policy_pb2.Policy", + "shortName": "get_iam_policy" }, - "description": "Sample for GetRepository", - "file": "artifactregistry_v1_generated_artifact_registry_get_repository_async.py", + "description": "Sample for GetIamPolicy", + "file": "artifactregistry_v1_generated_artifact_registry_get_iam_policy_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_GetRepository_async", + "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_GetIamPolicy_async", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, { - "end": 40, - "start": 38, + "end": 41, + "start": 39, "type": "CLIENT_INITIALIZATION" }, { - "end": 45, - "start": 41, + "end": 46, + "start": 42, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 53, + "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "artifactregistry_v1_generated_artifact_registry_get_repository_async.py" + "title": 
"artifactregistry_v1_generated_artifact_registry_get_iam_policy_async.py" }, { "canonical": true, @@ -2540,23 +2570,19 @@ "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient", "shortName": "ArtifactRegistryClient" }, - "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient.get_repository", + "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient.get_iam_policy", "method": { - "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.GetRepository", + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.GetIamPolicy", "service": { "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry", "shortName": "ArtifactRegistry" }, - "shortName": "GetRepository" + "shortName": "GetIamPolicy" }, "parameters": [ { "name": "request", - "type": "google.cloud.artifactregistry_v1.types.GetRepositoryRequest" - }, - { - "name": "name", - "type": "str" + "type": "google.iam.v1.iam_policy_pb2.GetIamPolicyRequest" }, { "name": "retry", @@ -2571,47 +2597,47 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.artifactregistry_v1.types.Repository", - "shortName": "get_repository" + "resultType": "google.iam.v1.policy_pb2.Policy", + "shortName": "get_iam_policy" }, - "description": "Sample for GetRepository", - "file": "artifactregistry_v1_generated_artifact_registry_get_repository_sync.py", + "description": "Sample for GetIamPolicy", + "file": "artifactregistry_v1_generated_artifact_registry_get_iam_policy_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_GetRepository_sync", + "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_GetIamPolicy_sync", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, { - "end": 40, - "start": 38, + "end": 41, + "start": 39, "type": "CLIENT_INITIALIZATION" }, { - "end": 45, - "start": 41, + "end": 46, + 
"start": 42, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 53, + "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "artifactregistry_v1_generated_artifact_registry_get_repository_sync.py" + "title": "artifactregistry_v1_generated_artifact_registry_get_iam_policy_sync.py" }, { "canonical": true, @@ -2621,19 +2647,19 @@ "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient", "shortName": "ArtifactRegistryAsyncClient" }, - "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient.get_tag", + "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient.get_maven_artifact", "method": { - "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.GetTag", + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.GetMavenArtifact", "service": { "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry", "shortName": "ArtifactRegistry" }, - "shortName": "GetTag" + "shortName": "GetMavenArtifact" }, "parameters": [ { "name": "request", - "type": "google.cloud.artifactregistry_v1.types.GetTagRequest" + "type": "google.cloud.artifactregistry_v1.types.GetMavenArtifactRequest" }, { "name": "name", @@ -2652,22 +2678,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.artifactregistry_v1.types.Tag", - "shortName": "get_tag" + "resultType": "google.cloud.artifactregistry_v1.types.MavenArtifact", + "shortName": "get_maven_artifact" }, - "description": "Sample for GetTag", - "file": "artifactregistry_v1_generated_artifact_registry_get_tag_async.py", + "description": "Sample for GetMavenArtifact", + "file": "artifactregistry_v1_generated_artifact_registry_get_maven_artifact_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_GetTag_async", + "regionTag": 
"artifactregistry_v1_generated_ArtifactRegistry_GetMavenArtifact_async", "segments": [ { - "end": 50, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 50, + "end": 51, "start": 27, "type": "SHORT" }, @@ -2677,22 +2703,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 44, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 47, - "start": 45, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 51, - "start": 48, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "artifactregistry_v1_generated_artifact_registry_get_tag_async.py" + "title": "artifactregistry_v1_generated_artifact_registry_get_maven_artifact_async.py" }, { "canonical": true, @@ -2701,19 +2727,19 @@ "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient", "shortName": "ArtifactRegistryClient" }, - "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient.get_tag", + "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient.get_maven_artifact", "method": { - "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.GetTag", + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.GetMavenArtifact", "service": { "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry", "shortName": "ArtifactRegistry" }, - "shortName": "GetTag" + "shortName": "GetMavenArtifact" }, "parameters": [ { "name": "request", - "type": "google.cloud.artifactregistry_v1.types.GetTagRequest" + "type": "google.cloud.artifactregistry_v1.types.GetMavenArtifactRequest" }, { "name": "name", @@ -2732,22 +2758,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.artifactregistry_v1.types.Tag", - "shortName": "get_tag" + "resultType": "google.cloud.artifactregistry_v1.types.MavenArtifact", + "shortName": "get_maven_artifact" }, - "description": "Sample for GetTag", - "file": "artifactregistry_v1_generated_artifact_registry_get_tag_sync.py", + "description": "Sample for GetMavenArtifact", + 
"file": "artifactregistry_v1_generated_artifact_registry_get_maven_artifact_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_GetTag_sync", + "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_GetMavenArtifact_sync", "segments": [ { - "end": 50, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 50, + "end": 51, "start": 27, "type": "SHORT" }, @@ -2757,22 +2783,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 44, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 47, - "start": 45, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 51, - "start": 48, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "artifactregistry_v1_generated_artifact_registry_get_tag_sync.py" + "title": "artifactregistry_v1_generated_artifact_registry_get_maven_artifact_sync.py" }, { "canonical": true, @@ -2782,19 +2808,19 @@ "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient", "shortName": "ArtifactRegistryAsyncClient" }, - "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient.get_vpcsc_config", + "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient.get_npm_package", "method": { - "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.GetVPCSCConfig", + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.GetNpmPackage", "service": { "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry", "shortName": "ArtifactRegistry" }, - "shortName": "GetVPCSCConfig" + "shortName": "GetNpmPackage" }, "parameters": [ { "name": "request", - "type": "google.cloud.artifactregistry_v1.types.GetVPCSCConfigRequest" + "type": "google.cloud.artifactregistry_v1.types.GetNpmPackageRequest" }, { "name": "name", @@ -2813,14 +2839,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.artifactregistry_v1.types.VPCSCConfig", - "shortName": 
"get_vpcsc_config" + "resultType": "google.cloud.artifactregistry_v1.types.NpmPackage", + "shortName": "get_npm_package" }, - "description": "Sample for GetVPCSCConfig", - "file": "artifactregistry_v1_generated_artifact_registry_get_vpcsc_config_async.py", + "description": "Sample for GetNpmPackage", + "file": "artifactregistry_v1_generated_artifact_registry_get_npm_package_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_GetVPCSCConfig_async", + "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_GetNpmPackage_async", "segments": [ { "end": 51, @@ -2853,7 +2879,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "artifactregistry_v1_generated_artifact_registry_get_vpcsc_config_async.py" + "title": "artifactregistry_v1_generated_artifact_registry_get_npm_package_async.py" }, { "canonical": true, @@ -2862,19 +2888,19 @@ "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient", "shortName": "ArtifactRegistryClient" }, - "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient.get_vpcsc_config", + "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient.get_npm_package", "method": { - "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.GetVPCSCConfig", + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.GetNpmPackage", "service": { "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry", "shortName": "ArtifactRegistry" }, - "shortName": "GetVPCSCConfig" + "shortName": "GetNpmPackage" }, "parameters": [ { "name": "request", - "type": "google.cloud.artifactregistry_v1.types.GetVPCSCConfigRequest" + "type": "google.cloud.artifactregistry_v1.types.GetNpmPackageRequest" }, { "name": "name", @@ -2893,14 +2919,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.artifactregistry_v1.types.VPCSCConfig", - "shortName": "get_vpcsc_config" + "resultType": 
"google.cloud.artifactregistry_v1.types.NpmPackage", + "shortName": "get_npm_package" }, - "description": "Sample for GetVPCSCConfig", - "file": "artifactregistry_v1_generated_artifact_registry_get_vpcsc_config_sync.py", + "description": "Sample for GetNpmPackage", + "file": "artifactregistry_v1_generated_artifact_registry_get_npm_package_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_GetVPCSCConfig_sync", + "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_GetNpmPackage_sync", "segments": [ { "end": 51, @@ -2933,7 +2959,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "artifactregistry_v1_generated_artifact_registry_get_vpcsc_config_sync.py" + "title": "artifactregistry_v1_generated_artifact_registry_get_npm_package_sync.py" }, { "canonical": true, @@ -2943,19 +2969,19 @@ "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient", "shortName": "ArtifactRegistryAsyncClient" }, - "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient.get_version", + "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient.get_package", "method": { - "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.GetVersion", + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.GetPackage", "service": { "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry", "shortName": "ArtifactRegistry" }, - "shortName": "GetVersion" + "shortName": "GetPackage" }, "parameters": [ { "name": "request", - "type": "google.cloud.artifactregistry_v1.types.GetVersionRequest" + "type": "google.cloud.artifactregistry_v1.types.GetPackageRequest" }, { "name": "name", @@ -2974,22 +3000,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.artifactregistry_v1.types.Version", - "shortName": "get_version" + "resultType": "google.cloud.artifactregistry_v1.types.Package", + "shortName": "get_package" }, - "description": "Sample 
for GetVersion", - "file": "artifactregistry_v1_generated_artifact_registry_get_version_async.py", + "description": "Sample for GetPackage", + "file": "artifactregistry_v1_generated_artifact_registry_get_package_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_GetVersion_async", + "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_GetPackage_async", "segments": [ { - "end": 50, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 50, + "end": 51, "start": 27, "type": "SHORT" }, @@ -2999,22 +3025,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 44, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 47, - "start": 45, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 51, - "start": 48, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "artifactregistry_v1_generated_artifact_registry_get_version_async.py" + "title": "artifactregistry_v1_generated_artifact_registry_get_package_async.py" }, { "canonical": true, @@ -3023,19 +3049,19 @@ "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient", "shortName": "ArtifactRegistryClient" }, - "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient.get_version", + "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient.get_package", "method": { - "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.GetVersion", + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.GetPackage", "service": { "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry", "shortName": "ArtifactRegistry" }, - "shortName": "GetVersion" + "shortName": "GetPackage" }, "parameters": [ { "name": "request", - "type": "google.cloud.artifactregistry_v1.types.GetVersionRequest" + "type": "google.cloud.artifactregistry_v1.types.GetPackageRequest" }, { "name": "name", @@ -3054,22 +3080,22 @@ "type": "Sequence[Tuple[str, str]" } ], - 
"resultType": "google.cloud.artifactregistry_v1.types.Version", - "shortName": "get_version" + "resultType": "google.cloud.artifactregistry_v1.types.Package", + "shortName": "get_package" }, - "description": "Sample for GetVersion", - "file": "artifactregistry_v1_generated_artifact_registry_get_version_sync.py", + "description": "Sample for GetPackage", + "file": "artifactregistry_v1_generated_artifact_registry_get_package_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_GetVersion_sync", + "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_GetPackage_sync", "segments": [ { - "end": 50, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 50, + "end": 51, "start": 27, "type": "SHORT" }, @@ -3079,22 +3105,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 44, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 47, - "start": 45, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 51, - "start": 48, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "artifactregistry_v1_generated_artifact_registry_get_version_sync.py" + "title": "artifactregistry_v1_generated_artifact_registry_get_package_sync.py" }, { "canonical": true, @@ -3104,19 +3130,23 @@ "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient", "shortName": "ArtifactRegistryAsyncClient" }, - "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient.import_apt_artifacts", + "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient.get_project_settings", "method": { - "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.ImportAptArtifacts", + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.GetProjectSettings", "service": { "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry", "shortName": "ArtifactRegistry" }, - "shortName": "ImportAptArtifacts" + "shortName": 
"GetProjectSettings" }, "parameters": [ { "name": "request", - "type": "google.cloud.artifactregistry_v1.types.ImportAptArtifactsRequest" + "type": "google.cloud.artifactregistry_v1.types.GetProjectSettingsRequest" + }, + { + "name": "name", + "type": "str" }, { "name": "retry", @@ -3131,22 +3161,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "import_apt_artifacts" + "resultType": "google.cloud.artifactregistry_v1.types.ProjectSettings", + "shortName": "get_project_settings" }, - "description": "Sample for ImportAptArtifacts", - "file": "artifactregistry_v1_generated_artifact_registry_import_apt_artifacts_async.py", + "description": "Sample for GetProjectSettings", + "file": "artifactregistry_v1_generated_artifact_registry_get_project_settings_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_ImportAptArtifacts_async", + "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_GetProjectSettings_async", "segments": [ { - "end": 54, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 54, + "end": 51, "start": 27, "type": "SHORT" }, @@ -3156,22 +3186,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 44, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 51, - "start": 45, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 55, - "start": 52, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "artifactregistry_v1_generated_artifact_registry_import_apt_artifacts_async.py" + "title": "artifactregistry_v1_generated_artifact_registry_get_project_settings_async.py" }, { "canonical": true, @@ -3180,19 +3210,23 @@ "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient", "shortName": "ArtifactRegistryClient" }, - "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient.import_apt_artifacts", + "fullName": 
"google.cloud.artifactregistry_v1.ArtifactRegistryClient.get_project_settings", "method": { - "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.ImportAptArtifacts", + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.GetProjectSettings", "service": { "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry", "shortName": "ArtifactRegistry" }, - "shortName": "ImportAptArtifacts" + "shortName": "GetProjectSettings" }, "parameters": [ { "name": "request", - "type": "google.cloud.artifactregistry_v1.types.ImportAptArtifactsRequest" + "type": "google.cloud.artifactregistry_v1.types.GetProjectSettingsRequest" + }, + { + "name": "name", + "type": "str" }, { "name": "retry", @@ -3207,22 +3241,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.operation.Operation", - "shortName": "import_apt_artifacts" + "resultType": "google.cloud.artifactregistry_v1.types.ProjectSettings", + "shortName": "get_project_settings" }, - "description": "Sample for ImportAptArtifacts", - "file": "artifactregistry_v1_generated_artifact_registry_import_apt_artifacts_sync.py", + "description": "Sample for GetProjectSettings", + "file": "artifactregistry_v1_generated_artifact_registry_get_project_settings_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_ImportAptArtifacts_sync", + "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_GetProjectSettings_sync", "segments": [ { - "end": 54, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 54, + "end": 51, "start": 27, "type": "SHORT" }, @@ -3232,22 +3266,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 44, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 51, - "start": 45, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 55, - "start": 52, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": 
"artifactregistry_v1_generated_artifact_registry_import_apt_artifacts_sync.py" + "title": "artifactregistry_v1_generated_artifact_registry_get_project_settings_sync.py" }, { "canonical": true, @@ -3257,19 +3291,23 @@ "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient", "shortName": "ArtifactRegistryAsyncClient" }, - "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient.import_yum_artifacts", + "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient.get_python_package", "method": { - "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.ImportYumArtifacts", + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.GetPythonPackage", "service": { "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry", "shortName": "ArtifactRegistry" }, - "shortName": "ImportYumArtifacts" + "shortName": "GetPythonPackage" }, "parameters": [ { "name": "request", - "type": "google.cloud.artifactregistry_v1.types.ImportYumArtifactsRequest" + "type": "google.cloud.artifactregistry_v1.types.GetPythonPackageRequest" + }, + { + "name": "name", + "type": "str" }, { "name": "retry", @@ -3284,22 +3322,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "import_yum_artifacts" + "resultType": "google.cloud.artifactregistry_v1.types.PythonPackage", + "shortName": "get_python_package" }, - "description": "Sample for ImportYumArtifacts", - "file": "artifactregistry_v1_generated_artifact_registry_import_yum_artifacts_async.py", + "description": "Sample for GetPythonPackage", + "file": "artifactregistry_v1_generated_artifact_registry_get_python_package_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_ImportYumArtifacts_async", + "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_GetPythonPackage_async", "segments": [ { - "end": 54, + "end": 51, 
"start": 27, "type": "FULL" }, { - "end": 54, + "end": 51, "start": 27, "type": "SHORT" }, @@ -3309,22 +3347,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 44, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 51, - "start": 45, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 55, - "start": 52, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "artifactregistry_v1_generated_artifact_registry_import_yum_artifacts_async.py" + "title": "artifactregistry_v1_generated_artifact_registry_get_python_package_async.py" }, { "canonical": true, @@ -3333,19 +3371,23 @@ "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient", "shortName": "ArtifactRegistryClient" }, - "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient.import_yum_artifacts", + "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient.get_python_package", "method": { - "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.ImportYumArtifacts", + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.GetPythonPackage", "service": { "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry", "shortName": "ArtifactRegistry" }, - "shortName": "ImportYumArtifacts" + "shortName": "GetPythonPackage" }, "parameters": [ { "name": "request", - "type": "google.cloud.artifactregistry_v1.types.ImportYumArtifactsRequest" + "type": "google.cloud.artifactregistry_v1.types.GetPythonPackageRequest" + }, + { + "name": "name", + "type": "str" }, { "name": "retry", @@ -3360,22 +3402,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.operation.Operation", - "shortName": "import_yum_artifacts" + "resultType": "google.cloud.artifactregistry_v1.types.PythonPackage", + "shortName": "get_python_package" }, - "description": "Sample for ImportYumArtifacts", - "file": "artifactregistry_v1_generated_artifact_registry_import_yum_artifacts_sync.py", + "description": "Sample 
for GetPythonPackage", + "file": "artifactregistry_v1_generated_artifact_registry_get_python_package_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_ImportYumArtifacts_sync", + "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_GetPythonPackage_sync", "segments": [ { - "end": 54, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 54, + "end": 51, "start": 27, "type": "SHORT" }, @@ -3385,22 +3427,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 44, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 51, - "start": 45, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 55, - "start": 52, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "artifactregistry_v1_generated_artifact_registry_import_yum_artifacts_sync.py" + "title": "artifactregistry_v1_generated_artifact_registry_get_python_package_sync.py" }, { "canonical": true, @@ -3410,22 +3452,22 @@ "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient", "shortName": "ArtifactRegistryAsyncClient" }, - "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient.list_docker_images", + "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient.get_repository", "method": { - "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.ListDockerImages", + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.GetRepository", "service": { "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry", "shortName": "ArtifactRegistry" }, - "shortName": "ListDockerImages" + "shortName": "GetRepository" }, "parameters": [ { "name": "request", - "type": "google.cloud.artifactregistry_v1.types.ListDockerImagesRequest" + "type": "google.cloud.artifactregistry_v1.types.GetRepositoryRequest" }, { - "name": "parent", + "name": "name", "type": "str" }, { @@ -3441,22 +3483,22 @@ "type": "Sequence[Tuple[str, str]" } 
], - "resultType": "google.cloud.artifactregistry_v1.services.artifact_registry.pagers.ListDockerImagesAsyncPager", - "shortName": "list_docker_images" + "resultType": "google.cloud.artifactregistry_v1.types.Repository", + "shortName": "get_repository" }, - "description": "Sample for ListDockerImages", - "file": "artifactregistry_v1_generated_artifact_registry_list_docker_images_async.py", + "description": "Sample for GetRepository", + "file": "artifactregistry_v1_generated_artifact_registry_get_repository_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_ListDockerImages_async", + "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_GetRepository_async", "segments": [ { - "end": 52, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 51, "start": 27, "type": "SHORT" }, @@ -3476,12 +3518,12 @@ "type": "REQUEST_EXECUTION" }, { - "end": 53, + "end": 52, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "artifactregistry_v1_generated_artifact_registry_list_docker_images_async.py" + "title": "artifactregistry_v1_generated_artifact_registry_get_repository_async.py" }, { "canonical": true, @@ -3490,22 +3532,22 @@ "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient", "shortName": "ArtifactRegistryClient" }, - "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient.list_docker_images", + "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient.get_repository", "method": { - "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.ListDockerImages", + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.GetRepository", "service": { "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry", "shortName": "ArtifactRegistry" }, - "shortName": "ListDockerImages" + "shortName": "GetRepository" }, "parameters": [ { "name": "request", - "type": 
"google.cloud.artifactregistry_v1.types.ListDockerImagesRequest" + "type": "google.cloud.artifactregistry_v1.types.GetRepositoryRequest" }, { - "name": "parent", + "name": "name", "type": "str" }, { @@ -3521,22 +3563,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.artifactregistry_v1.services.artifact_registry.pagers.ListDockerImagesPager", - "shortName": "list_docker_images" + "resultType": "google.cloud.artifactregistry_v1.types.Repository", + "shortName": "get_repository" }, - "description": "Sample for ListDockerImages", - "file": "artifactregistry_v1_generated_artifact_registry_list_docker_images_sync.py", + "description": "Sample for GetRepository", + "file": "artifactregistry_v1_generated_artifact_registry_get_repository_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_ListDockerImages_sync", + "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_GetRepository_sync", "segments": [ { - "end": 52, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 51, "start": 27, "type": "SHORT" }, @@ -3556,12 +3598,12 @@ "type": "REQUEST_EXECUTION" }, { - "end": 53, + "end": 52, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "artifactregistry_v1_generated_artifact_registry_list_docker_images_sync.py" + "title": "artifactregistry_v1_generated_artifact_registry_get_repository_sync.py" }, { "canonical": true, @@ -3571,22 +3613,22 @@ "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient", "shortName": "ArtifactRegistryAsyncClient" }, - "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient.list_files", + "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient.get_rule", "method": { - "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.ListFiles", + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.GetRule", "service": { "fullName": 
"google.devtools.artifactregistry.v1.ArtifactRegistry", "shortName": "ArtifactRegistry" }, - "shortName": "ListFiles" + "shortName": "GetRule" }, "parameters": [ { "name": "request", - "type": "google.cloud.artifactregistry_v1.types.ListFilesRequest" + "type": "google.cloud.artifactregistry_v1.types.GetRuleRequest" }, { - "name": "parent", + "name": "name", "type": "str" }, { @@ -3602,22 +3644,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.artifactregistry_v1.services.artifact_registry.pagers.ListFilesAsyncPager", - "shortName": "list_files" + "resultType": "google.cloud.artifactregistry_v1.types.Rule", + "shortName": "get_rule" }, - "description": "Sample for ListFiles", - "file": "artifactregistry_v1_generated_artifact_registry_list_files_async.py", + "description": "Sample for GetRule", + "file": "artifactregistry_v1_generated_artifact_registry_get_rule_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_ListFiles_async", + "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_GetRule_async", "segments": [ { - "end": 52, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 51, "start": 27, "type": "SHORT" }, @@ -3637,12 +3679,12 @@ "type": "REQUEST_EXECUTION" }, { - "end": 53, + "end": 52, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "artifactregistry_v1_generated_artifact_registry_list_files_async.py" + "title": "artifactregistry_v1_generated_artifact_registry_get_rule_async.py" }, { "canonical": true, @@ -3651,22 +3693,22 @@ "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient", "shortName": "ArtifactRegistryClient" }, - "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient.list_files", + "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient.get_rule", "method": { - "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.ListFiles", + "fullName": 
"google.devtools.artifactregistry.v1.ArtifactRegistry.GetRule", "service": { "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry", "shortName": "ArtifactRegistry" }, - "shortName": "ListFiles" + "shortName": "GetRule" }, "parameters": [ { "name": "request", - "type": "google.cloud.artifactregistry_v1.types.ListFilesRequest" + "type": "google.cloud.artifactregistry_v1.types.GetRuleRequest" }, { - "name": "parent", + "name": "name", "type": "str" }, { @@ -3682,22 +3724,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.artifactregistry_v1.services.artifact_registry.pagers.ListFilesPager", - "shortName": "list_files" + "resultType": "google.cloud.artifactregistry_v1.types.Rule", + "shortName": "get_rule" }, - "description": "Sample for ListFiles", - "file": "artifactregistry_v1_generated_artifact_registry_list_files_sync.py", + "description": "Sample for GetRule", + "file": "artifactregistry_v1_generated_artifact_registry_get_rule_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_ListFiles_sync", + "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_GetRule_sync", "segments": [ { - "end": 52, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 51, "start": 27, "type": "SHORT" }, @@ -3717,12 +3759,12 @@ "type": "REQUEST_EXECUTION" }, { - "end": 53, + "end": 52, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "artifactregistry_v1_generated_artifact_registry_list_files_sync.py" + "title": "artifactregistry_v1_generated_artifact_registry_get_rule_sync.py" }, { "canonical": true, @@ -3732,22 +3774,22 @@ "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient", "shortName": "ArtifactRegistryAsyncClient" }, - "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient.list_maven_artifacts", + "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient.get_tag", "method": { - "fullName": 
"google.devtools.artifactregistry.v1.ArtifactRegistry.ListMavenArtifacts", + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.GetTag", "service": { "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry", "shortName": "ArtifactRegistry" }, - "shortName": "ListMavenArtifacts" + "shortName": "GetTag" }, "parameters": [ { "name": "request", - "type": "google.cloud.artifactregistry_v1.types.ListMavenArtifactsRequest" + "type": "google.cloud.artifactregistry_v1.types.GetTagRequest" }, { - "name": "parent", + "name": "name", "type": "str" }, { @@ -3763,22 +3805,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.artifactregistry_v1.services.artifact_registry.pagers.ListMavenArtifactsAsyncPager", - "shortName": "list_maven_artifacts" + "resultType": "google.cloud.artifactregistry_v1.types.Tag", + "shortName": "get_tag" }, - "description": "Sample for ListMavenArtifacts", - "file": "artifactregistry_v1_generated_artifact_registry_list_maven_artifacts_async.py", + "description": "Sample for GetTag", + "file": "artifactregistry_v1_generated_artifact_registry_get_tag_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_ListMavenArtifacts_async", + "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_GetTag_async", "segments": [ { - "end": 52, + "end": 50, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 50, "start": 27, "type": "SHORT" }, @@ -3788,22 +3830,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 44, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 47, + "start": 45, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 51, + "start": 48, "type": "RESPONSE_HANDLING" } ], - "title": "artifactregistry_v1_generated_artifact_registry_list_maven_artifacts_async.py" + "title": "artifactregistry_v1_generated_artifact_registry_get_tag_async.py" }, { "canonical": true, @@ 
-3812,22 +3854,22 @@ "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient", "shortName": "ArtifactRegistryClient" }, - "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient.list_maven_artifacts", + "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient.get_tag", "method": { - "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.ListMavenArtifacts", + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.GetTag", "service": { "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry", "shortName": "ArtifactRegistry" }, - "shortName": "ListMavenArtifacts" + "shortName": "GetTag" }, "parameters": [ { "name": "request", - "type": "google.cloud.artifactregistry_v1.types.ListMavenArtifactsRequest" + "type": "google.cloud.artifactregistry_v1.types.GetTagRequest" }, { - "name": "parent", + "name": "name", "type": "str" }, { @@ -3843,22 +3885,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.artifactregistry_v1.services.artifact_registry.pagers.ListMavenArtifactsPager", - "shortName": "list_maven_artifacts" + "resultType": "google.cloud.artifactregistry_v1.types.Tag", + "shortName": "get_tag" }, - "description": "Sample for ListMavenArtifacts", - "file": "artifactregistry_v1_generated_artifact_registry_list_maven_artifacts_sync.py", + "description": "Sample for GetTag", + "file": "artifactregistry_v1_generated_artifact_registry_get_tag_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_ListMavenArtifacts_sync", + "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_GetTag_sync", "segments": [ { - "end": 52, + "end": 50, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 50, "start": 27, "type": "SHORT" }, @@ -3868,22 +3910,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 44, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 47, + 
"start": 45, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 51, + "start": 48, "type": "RESPONSE_HANDLING" } ], - "title": "artifactregistry_v1_generated_artifact_registry_list_maven_artifacts_sync.py" + "title": "artifactregistry_v1_generated_artifact_registry_get_tag_sync.py" }, { "canonical": true, @@ -3893,22 +3935,22 @@ "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient", "shortName": "ArtifactRegistryAsyncClient" }, - "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient.list_npm_packages", + "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient.get_vpcsc_config", "method": { - "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.ListNpmPackages", + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.GetVPCSCConfig", "service": { "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry", "shortName": "ArtifactRegistry" }, - "shortName": "ListNpmPackages" + "shortName": "GetVPCSCConfig" }, "parameters": [ { "name": "request", - "type": "google.cloud.artifactregistry_v1.types.ListNpmPackagesRequest" + "type": "google.cloud.artifactregistry_v1.types.GetVPCSCConfigRequest" }, { - "name": "parent", + "name": "name", "type": "str" }, { @@ -3924,22 +3966,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.artifactregistry_v1.services.artifact_registry.pagers.ListNpmPackagesAsyncPager", - "shortName": "list_npm_packages" + "resultType": "google.cloud.artifactregistry_v1.types.VPCSCConfig", + "shortName": "get_vpcsc_config" }, - "description": "Sample for ListNpmPackages", - "file": "artifactregistry_v1_generated_artifact_registry_list_npm_packages_async.py", + "description": "Sample for GetVPCSCConfig", + "file": "artifactregistry_v1_generated_artifact_registry_get_vpcsc_config_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_ListNpmPackages_async", 
+ "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_GetVPCSCConfig_async", "segments": [ { - "end": 52, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 51, "start": 27, "type": "SHORT" }, @@ -3959,12 +4001,12 @@ "type": "REQUEST_EXECUTION" }, { - "end": 53, + "end": 52, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "artifactregistry_v1_generated_artifact_registry_list_npm_packages_async.py" + "title": "artifactregistry_v1_generated_artifact_registry_get_vpcsc_config_async.py" }, { "canonical": true, @@ -3973,22 +4015,22 @@ "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient", "shortName": "ArtifactRegistryClient" }, - "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient.list_npm_packages", + "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient.get_vpcsc_config", "method": { - "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.ListNpmPackages", + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.GetVPCSCConfig", "service": { "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry", "shortName": "ArtifactRegistry" }, - "shortName": "ListNpmPackages" + "shortName": "GetVPCSCConfig" }, "parameters": [ { "name": "request", - "type": "google.cloud.artifactregistry_v1.types.ListNpmPackagesRequest" + "type": "google.cloud.artifactregistry_v1.types.GetVPCSCConfigRequest" }, { - "name": "parent", + "name": "name", "type": "str" }, { @@ -4004,22 +4046,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.artifactregistry_v1.services.artifact_registry.pagers.ListNpmPackagesPager", - "shortName": "list_npm_packages" + "resultType": "google.cloud.artifactregistry_v1.types.VPCSCConfig", + "shortName": "get_vpcsc_config" }, - "description": "Sample for ListNpmPackages", - "file": "artifactregistry_v1_generated_artifact_registry_list_npm_packages_sync.py", + "description": "Sample for GetVPCSCConfig", + "file": 
"artifactregistry_v1_generated_artifact_registry_get_vpcsc_config_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_ListNpmPackages_sync", + "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_GetVPCSCConfig_sync", "segments": [ { - "end": 52, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 51, "start": 27, "type": "SHORT" }, @@ -4039,12 +4081,12 @@ "type": "REQUEST_EXECUTION" }, { - "end": 53, + "end": 52, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "artifactregistry_v1_generated_artifact_registry_list_npm_packages_sync.py" + "title": "artifactregistry_v1_generated_artifact_registry_get_vpcsc_config_sync.py" }, { "canonical": true, @@ -4054,22 +4096,22 @@ "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient", "shortName": "ArtifactRegistryAsyncClient" }, - "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient.list_packages", + "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient.get_version", "method": { - "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.ListPackages", + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.GetVersion", "service": { "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry", "shortName": "ArtifactRegistry" }, - "shortName": "ListPackages" + "shortName": "GetVersion" }, "parameters": [ { "name": "request", - "type": "google.cloud.artifactregistry_v1.types.ListPackagesRequest" + "type": "google.cloud.artifactregistry_v1.types.GetVersionRequest" }, { - "name": "parent", + "name": "name", "type": "str" }, { @@ -4085,22 +4127,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.artifactregistry_v1.services.artifact_registry.pagers.ListPackagesAsyncPager", - "shortName": "list_packages" + "resultType": "google.cloud.artifactregistry_v1.types.Version", + "shortName": "get_version" }, - "description": 
"Sample for ListPackages", - "file": "artifactregistry_v1_generated_artifact_registry_list_packages_async.py", + "description": "Sample for GetVersion", + "file": "artifactregistry_v1_generated_artifact_registry_get_version_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_ListPackages_async", + "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_GetVersion_async", "segments": [ { - "end": 52, + "end": 50, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 50, "start": 27, "type": "SHORT" }, @@ -4110,22 +4152,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 44, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 47, + "start": 45, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 51, + "start": 48, "type": "RESPONSE_HANDLING" } ], - "title": "artifactregistry_v1_generated_artifact_registry_list_packages_async.py" + "title": "artifactregistry_v1_generated_artifact_registry_get_version_async.py" }, { "canonical": true, @@ -4134,22 +4176,22 @@ "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient", "shortName": "ArtifactRegistryClient" }, - "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient.list_packages", + "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient.get_version", "method": { - "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.ListPackages", + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.GetVersion", "service": { "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry", "shortName": "ArtifactRegistry" }, - "shortName": "ListPackages" + "shortName": "GetVersion" }, "parameters": [ { "name": "request", - "type": "google.cloud.artifactregistry_v1.types.ListPackagesRequest" + "type": "google.cloud.artifactregistry_v1.types.GetVersionRequest" }, { - "name": "parent", + "name": "name", "type": "str" }, { @@ 
-4165,22 +4207,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.artifactregistry_v1.services.artifact_registry.pagers.ListPackagesPager", - "shortName": "list_packages" + "resultType": "google.cloud.artifactregistry_v1.types.Version", + "shortName": "get_version" }, - "description": "Sample for ListPackages", - "file": "artifactregistry_v1_generated_artifact_registry_list_packages_sync.py", + "description": "Sample for GetVersion", + "file": "artifactregistry_v1_generated_artifact_registry_get_version_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_ListPackages_sync", + "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_GetVersion_sync", "segments": [ { - "end": 52, + "end": 50, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 50, "start": 27, "type": "SHORT" }, @@ -4190,22 +4232,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 44, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 47, + "start": 45, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 51, + "start": 48, "type": "RESPONSE_HANDLING" } ], - "title": "artifactregistry_v1_generated_artifact_registry_list_packages_sync.py" + "title": "artifactregistry_v1_generated_artifact_registry_get_version_sync.py" }, { "canonical": true, @@ -4215,23 +4257,19 @@ "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient", "shortName": "ArtifactRegistryAsyncClient" }, - "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient.list_python_packages", + "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient.import_apt_artifacts", "method": { - "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.ListPythonPackages", + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.ImportAptArtifacts", "service": { "fullName": 
"google.devtools.artifactregistry.v1.ArtifactRegistry", "shortName": "ArtifactRegistry" }, - "shortName": "ListPythonPackages" + "shortName": "ImportAptArtifacts" }, "parameters": [ { "name": "request", - "type": "google.cloud.artifactregistry_v1.types.ListPythonPackagesRequest" - }, - { - "name": "parent", - "type": "str" + "type": "google.cloud.artifactregistry_v1.types.ImportAptArtifactsRequest" }, { "name": "retry", @@ -4246,22 +4284,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.artifactregistry_v1.services.artifact_registry.pagers.ListPythonPackagesAsyncPager", - "shortName": "list_python_packages" + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "import_apt_artifacts" }, - "description": "Sample for ListPythonPackages", - "file": "artifactregistry_v1_generated_artifact_registry_list_python_packages_async.py", + "description": "Sample for ImportAptArtifacts", + "file": "artifactregistry_v1_generated_artifact_registry_import_apt_artifacts_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_ListPythonPackages_async", + "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_ImportAptArtifacts_async", "segments": [ { - "end": 52, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 54, "start": 27, "type": "SHORT" }, @@ -4271,22 +4309,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 44, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 51, + "start": 45, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], - "title": "artifactregistry_v1_generated_artifact_registry_list_python_packages_async.py" + "title": "artifactregistry_v1_generated_artifact_registry_import_apt_artifacts_async.py" }, { "canonical": true, @@ -4295,23 +4333,19 @@ "fullName": 
"google.cloud.artifactregistry_v1.ArtifactRegistryClient", "shortName": "ArtifactRegistryClient" }, - "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient.list_python_packages", + "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient.import_apt_artifacts", "method": { - "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.ListPythonPackages", + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.ImportAptArtifacts", "service": { "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry", "shortName": "ArtifactRegistry" }, - "shortName": "ListPythonPackages" + "shortName": "ImportAptArtifacts" }, "parameters": [ { "name": "request", - "type": "google.cloud.artifactregistry_v1.types.ListPythonPackagesRequest" - }, - { - "name": "parent", - "type": "str" + "type": "google.cloud.artifactregistry_v1.types.ImportAptArtifactsRequest" }, { "name": "retry", @@ -4326,22 +4360,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.artifactregistry_v1.services.artifact_registry.pagers.ListPythonPackagesPager", - "shortName": "list_python_packages" + "resultType": "google.api_core.operation.Operation", + "shortName": "import_apt_artifacts" }, - "description": "Sample for ListPythonPackages", - "file": "artifactregistry_v1_generated_artifact_registry_list_python_packages_sync.py", + "description": "Sample for ImportAptArtifacts", + "file": "artifactregistry_v1_generated_artifact_registry_import_apt_artifacts_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_ListPythonPackages_sync", + "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_ImportAptArtifacts_sync", "segments": [ { - "end": 52, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 54, "start": 27, "type": "SHORT" }, @@ -4351,22 +4385,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 44, "start": 41, "type": 
"REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 51, + "start": 45, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], - "title": "artifactregistry_v1_generated_artifact_registry_list_python_packages_sync.py" + "title": "artifactregistry_v1_generated_artifact_registry_import_apt_artifacts_sync.py" }, { "canonical": true, @@ -4376,23 +4410,19 @@ "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient", "shortName": "ArtifactRegistryAsyncClient" }, - "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient.list_repositories", + "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient.import_yum_artifacts", "method": { - "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.ListRepositories", + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.ImportYumArtifacts", "service": { "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry", "shortName": "ArtifactRegistry" }, - "shortName": "ListRepositories" + "shortName": "ImportYumArtifacts" }, "parameters": [ { "name": "request", - "type": "google.cloud.artifactregistry_v1.types.ListRepositoriesRequest" - }, - { - "name": "parent", - "type": "str" + "type": "google.cloud.artifactregistry_v1.types.ImportYumArtifactsRequest" }, { "name": "retry", @@ -4407,22 +4437,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.artifactregistry_v1.services.artifact_registry.pagers.ListRepositoriesAsyncPager", - "shortName": "list_repositories" + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "import_yum_artifacts" }, - "description": "Sample for ListRepositories", - "file": "artifactregistry_v1_generated_artifact_registry_list_repositories_async.py", + "description": "Sample for ImportYumArtifacts", + "file": "artifactregistry_v1_generated_artifact_registry_import_yum_artifacts_async.py", "language": 
"PYTHON", "origin": "API_DEFINITION", - "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_ListRepositories_async", + "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_ImportYumArtifacts_async", "segments": [ { - "end": 52, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 54, "start": 27, "type": "SHORT" }, @@ -4432,22 +4462,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 44, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 51, + "start": 45, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], - "title": "artifactregistry_v1_generated_artifact_registry_list_repositories_async.py" + "title": "artifactregistry_v1_generated_artifact_registry_import_yum_artifacts_async.py" }, { "canonical": true, @@ -4456,23 +4486,19 @@ "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient", "shortName": "ArtifactRegistryClient" }, - "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient.list_repositories", + "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient.import_yum_artifacts", "method": { - "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.ListRepositories", + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.ImportYumArtifacts", "service": { "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry", "shortName": "ArtifactRegistry" }, - "shortName": "ListRepositories" + "shortName": "ImportYumArtifacts" }, "parameters": [ { "name": "request", - "type": "google.cloud.artifactregistry_v1.types.ListRepositoriesRequest" - }, - { - "name": "parent", - "type": "str" + "type": "google.cloud.artifactregistry_v1.types.ImportYumArtifactsRequest" }, { "name": "retry", @@ -4487,22 +4513,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.artifactregistry_v1.services.artifact_registry.pagers.ListRepositoriesPager", - 
"shortName": "list_repositories" + "resultType": "google.api_core.operation.Operation", + "shortName": "import_yum_artifacts" }, - "description": "Sample for ListRepositories", - "file": "artifactregistry_v1_generated_artifact_registry_list_repositories_sync.py", + "description": "Sample for ImportYumArtifacts", + "file": "artifactregistry_v1_generated_artifact_registry_import_yum_artifacts_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_ListRepositories_sync", + "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_ImportYumArtifacts_sync", "segments": [ { - "end": 52, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 54, "start": 27, "type": "SHORT" }, @@ -4512,22 +4538,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 44, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 51, + "start": 45, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], - "title": "artifactregistry_v1_generated_artifact_registry_list_repositories_sync.py" + "title": "artifactregistry_v1_generated_artifact_registry_import_yum_artifacts_sync.py" }, { "canonical": true, @@ -4537,19 +4563,19 @@ "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient", "shortName": "ArtifactRegistryAsyncClient" }, - "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient.list_tags", + "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient.list_attachments", "method": { - "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.ListTags", + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.ListAttachments", "service": { "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry", "shortName": "ArtifactRegistry" }, - "shortName": "ListTags" + "shortName": "ListAttachments" }, "parameters": [ { "name": 
"request", - "type": "google.cloud.artifactregistry_v1.types.ListTagsRequest" + "type": "google.cloud.artifactregistry_v1.types.ListAttachmentsRequest" }, { "name": "parent", @@ -4568,22 +4594,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.artifactregistry_v1.services.artifact_registry.pagers.ListTagsAsyncPager", - "shortName": "list_tags" + "resultType": "google.cloud.artifactregistry_v1.services.artifact_registry.pagers.ListAttachmentsAsyncPager", + "shortName": "list_attachments" }, - "description": "Sample for ListTags", - "file": "artifactregistry_v1_generated_artifact_registry_list_tags_async.py", + "description": "Sample for ListAttachments", + "file": "artifactregistry_v1_generated_artifact_registry_list_attachments_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_ListTags_async", + "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_ListAttachments_async", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, @@ -4593,22 +4619,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 44, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 47, - "start": 45, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 48, + "end": 53, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "artifactregistry_v1_generated_artifact_registry_list_tags_async.py" + "title": "artifactregistry_v1_generated_artifact_registry_list_attachments_async.py" }, { "canonical": true, @@ -4617,19 +4643,19 @@ "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient", "shortName": "ArtifactRegistryClient" }, - "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient.list_tags", + "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient.list_attachments", "method": { - "fullName": 
"google.devtools.artifactregistry.v1.ArtifactRegistry.ListTags", + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.ListAttachments", "service": { "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry", "shortName": "ArtifactRegistry" }, - "shortName": "ListTags" + "shortName": "ListAttachments" }, "parameters": [ { "name": "request", - "type": "google.cloud.artifactregistry_v1.types.ListTagsRequest" + "type": "google.cloud.artifactregistry_v1.types.ListAttachmentsRequest" }, { "name": "parent", @@ -4648,22 +4674,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.artifactregistry_v1.services.artifact_registry.pagers.ListTagsPager", - "shortName": "list_tags" + "resultType": "google.cloud.artifactregistry_v1.services.artifact_registry.pagers.ListAttachmentsPager", + "shortName": "list_attachments" }, - "description": "Sample for ListTags", - "file": "artifactregistry_v1_generated_artifact_registry_list_tags_sync.py", + "description": "Sample for ListAttachments", + "file": "artifactregistry_v1_generated_artifact_registry_list_attachments_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_ListTags_sync", + "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_ListAttachments_sync", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, @@ -4673,22 +4699,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 44, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 47, - "start": 45, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 48, + "end": 53, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "artifactregistry_v1_generated_artifact_registry_list_tags_sync.py" + "title": "artifactregistry_v1_generated_artifact_registry_list_attachments_sync.py" }, { "canonical": true, @@ -4698,19 +4724,19 @@ 
"fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient", "shortName": "ArtifactRegistryAsyncClient" }, - "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient.list_versions", + "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient.list_docker_images", "method": { - "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.ListVersions", + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.ListDockerImages", "service": { "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry", "shortName": "ArtifactRegistry" }, - "shortName": "ListVersions" + "shortName": "ListDockerImages" }, "parameters": [ { "name": "request", - "type": "google.cloud.artifactregistry_v1.types.ListVersionsRequest" + "type": "google.cloud.artifactregistry_v1.types.ListDockerImagesRequest" }, { "name": "parent", @@ -4729,22 +4755,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.artifactregistry_v1.services.artifact_registry.pagers.ListVersionsAsyncPager", - "shortName": "list_versions" + "resultType": "google.cloud.artifactregistry_v1.services.artifact_registry.pagers.ListDockerImagesAsyncPager", + "shortName": "list_docker_images" }, - "description": "Sample for ListVersions", - "file": "artifactregistry_v1_generated_artifact_registry_list_versions_async.py", + "description": "Sample for ListDockerImages", + "file": "artifactregistry_v1_generated_artifact_registry_list_docker_images_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_ListVersions_async", + "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_ListDockerImages_async", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, @@ -4754,22 +4780,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 44, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - 
"end": 47, - "start": 45, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 48, + "end": 53, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "artifactregistry_v1_generated_artifact_registry_list_versions_async.py" + "title": "artifactregistry_v1_generated_artifact_registry_list_docker_images_async.py" }, { "canonical": true, @@ -4778,19 +4804,19 @@ "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient", "shortName": "ArtifactRegistryClient" }, - "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient.list_versions", + "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient.list_docker_images", "method": { - "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.ListVersions", + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.ListDockerImages", "service": { "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry", "shortName": "ArtifactRegistry" }, - "shortName": "ListVersions" + "shortName": "ListDockerImages" }, "parameters": [ { "name": "request", - "type": "google.cloud.artifactregistry_v1.types.ListVersionsRequest" + "type": "google.cloud.artifactregistry_v1.types.ListDockerImagesRequest" }, { "name": "parent", @@ -4809,22 +4835,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.artifactregistry_v1.services.artifact_registry.pagers.ListVersionsPager", - "shortName": "list_versions" + "resultType": "google.cloud.artifactregistry_v1.services.artifact_registry.pagers.ListDockerImagesPager", + "shortName": "list_docker_images" }, - "description": "Sample for ListVersions", - "file": "artifactregistry_v1_generated_artifact_registry_list_versions_sync.py", + "description": "Sample for ListDockerImages", + "file": "artifactregistry_v1_generated_artifact_registry_list_docker_images_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": 
"artifactregistry_v1_generated_ArtifactRegistry_ListVersions_sync", + "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_ListDockerImages_sync", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, @@ -4834,22 +4860,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 44, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 47, - "start": 45, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 48, + "end": 53, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "artifactregistry_v1_generated_artifact_registry_list_versions_sync.py" + "title": "artifactregistry_v1_generated_artifact_registry_list_docker_images_sync.py" }, { "canonical": true, @@ -4859,19 +4885,23 @@ "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient", "shortName": "ArtifactRegistryAsyncClient" }, - "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient.set_iam_policy", + "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient.list_files", "method": { - "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.SetIamPolicy", + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.ListFiles", "service": { "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry", "shortName": "ArtifactRegistry" }, - "shortName": "SetIamPolicy" + "shortName": "ListFiles" }, "parameters": [ { "name": "request", - "type": "google.iam.v1.iam_policy_pb2.SetIamPolicyRequest" + "type": "google.cloud.artifactregistry_v1.types.ListFilesRequest" + }, + { + "name": "parent", + "type": "str" }, { "name": "retry", @@ -4886,14 +4916,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.iam.v1.policy_pb2.Policy", - "shortName": "set_iam_policy" + "resultType": "google.cloud.artifactregistry_v1.services.artifact_registry.pagers.ListFilesAsyncPager", + "shortName": "list_files" }, - 
"description": "Sample for SetIamPolicy", - "file": "artifactregistry_v1_generated_artifact_registry_set_iam_policy_async.py", + "description": "Sample for ListFiles", + "file": "artifactregistry_v1_generated_artifact_registry_list_files_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_SetIamPolicy_async", + "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_ListFiles_async", "segments": [ { "end": 52, @@ -4906,27 +4936,27 @@ "type": "SHORT" }, { - "end": 41, - "start": 39, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 46, - "start": 42, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 47, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { "end": 53, - "start": 50, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "artifactregistry_v1_generated_artifact_registry_set_iam_policy_async.py" + "title": "artifactregistry_v1_generated_artifact_registry_list_files_async.py" }, { "canonical": true, @@ -4935,19 +4965,23 @@ "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient", "shortName": "ArtifactRegistryClient" }, - "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient.set_iam_policy", + "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient.list_files", "method": { - "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.SetIamPolicy", + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.ListFiles", "service": { "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry", "shortName": "ArtifactRegistry" }, - "shortName": "SetIamPolicy" + "shortName": "ListFiles" }, "parameters": [ { "name": "request", - "type": "google.iam.v1.iam_policy_pb2.SetIamPolicyRequest" + "type": "google.cloud.artifactregistry_v1.types.ListFilesRequest" + }, + { + "name": "parent", + "type": "str" }, { "name": "retry", @@ -4962,14 +4996,14 @@ "type": 
"Sequence[Tuple[str, str]" } ], - "resultType": "google.iam.v1.policy_pb2.Policy", - "shortName": "set_iam_policy" + "resultType": "google.cloud.artifactregistry_v1.services.artifact_registry.pagers.ListFilesPager", + "shortName": "list_files" }, - "description": "Sample for SetIamPolicy", - "file": "artifactregistry_v1_generated_artifact_registry_set_iam_policy_sync.py", + "description": "Sample for ListFiles", + "file": "artifactregistry_v1_generated_artifact_registry_list_files_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_SetIamPolicy_sync", + "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_ListFiles_sync", "segments": [ { "end": 52, @@ -4982,27 +5016,2128 @@ "type": "SHORT" }, { - "end": 41, - "start": 39, + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "artifactregistry_v1_generated_artifact_registry_list_files_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient", + "shortName": "ArtifactRegistryAsyncClient" + }, + "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient.list_maven_artifacts", + "method": { + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.ListMavenArtifacts", + "service": { + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry", + "shortName": "ArtifactRegistry" + }, + "shortName": "ListMavenArtifacts" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.artifactregistry_v1.types.ListMavenArtifactsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": 
"timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.artifactregistry_v1.services.artifact_registry.pagers.ListMavenArtifactsAsyncPager", + "shortName": "list_maven_artifacts" + }, + "description": "Sample for ListMavenArtifacts", + "file": "artifactregistry_v1_generated_artifact_registry_list_maven_artifacts_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_ListMavenArtifacts_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "artifactregistry_v1_generated_artifact_registry_list_maven_artifacts_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient", + "shortName": "ArtifactRegistryClient" + }, + "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient.list_maven_artifacts", + "method": { + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.ListMavenArtifacts", + "service": { + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry", + "shortName": "ArtifactRegistry" + }, + "shortName": "ListMavenArtifacts" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.artifactregistry_v1.types.ListMavenArtifactsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.cloud.artifactregistry_v1.services.artifact_registry.pagers.ListMavenArtifactsPager", + "shortName": "list_maven_artifacts" + }, + "description": "Sample for ListMavenArtifacts", + "file": "artifactregistry_v1_generated_artifact_registry_list_maven_artifacts_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_ListMavenArtifacts_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "artifactregistry_v1_generated_artifact_registry_list_maven_artifacts_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient", + "shortName": "ArtifactRegistryAsyncClient" + }, + "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient.list_npm_packages", + "method": { + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.ListNpmPackages", + "service": { + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry", + "shortName": "ArtifactRegistry" + }, + "shortName": "ListNpmPackages" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.artifactregistry_v1.types.ListNpmPackagesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.artifactregistry_v1.services.artifact_registry.pagers.ListNpmPackagesAsyncPager", + "shortName": "list_npm_packages" + }, + 
"description": "Sample for ListNpmPackages", + "file": "artifactregistry_v1_generated_artifact_registry_list_npm_packages_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_ListNpmPackages_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "artifactregistry_v1_generated_artifact_registry_list_npm_packages_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient", + "shortName": "ArtifactRegistryClient" + }, + "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient.list_npm_packages", + "method": { + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.ListNpmPackages", + "service": { + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry", + "shortName": "ArtifactRegistry" + }, + "shortName": "ListNpmPackages" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.artifactregistry_v1.types.ListNpmPackagesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.artifactregistry_v1.services.artifact_registry.pagers.ListNpmPackagesPager", + "shortName": "list_npm_packages" + }, + "description": "Sample for ListNpmPackages", + "file": "artifactregistry_v1_generated_artifact_registry_list_npm_packages_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + 
"regionTag": "artifactregistry_v1_generated_ArtifactRegistry_ListNpmPackages_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "artifactregistry_v1_generated_artifact_registry_list_npm_packages_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient", + "shortName": "ArtifactRegistryAsyncClient" + }, + "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient.list_packages", + "method": { + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.ListPackages", + "service": { + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry", + "shortName": "ArtifactRegistry" + }, + "shortName": "ListPackages" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.artifactregistry_v1.types.ListPackagesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.artifactregistry_v1.services.artifact_registry.pagers.ListPackagesAsyncPager", + "shortName": "list_packages" + }, + "description": "Sample for ListPackages", + "file": "artifactregistry_v1_generated_artifact_registry_list_packages_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_ListPackages_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, 
+ "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "artifactregistry_v1_generated_artifact_registry_list_packages_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient", + "shortName": "ArtifactRegistryClient" + }, + "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient.list_packages", + "method": { + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.ListPackages", + "service": { + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry", + "shortName": "ArtifactRegistry" + }, + "shortName": "ListPackages" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.artifactregistry_v1.types.ListPackagesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.artifactregistry_v1.services.artifact_registry.pagers.ListPackagesPager", + "shortName": "list_packages" + }, + "description": "Sample for ListPackages", + "file": "artifactregistry_v1_generated_artifact_registry_list_packages_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_ListPackages_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" 
+ }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "artifactregistry_v1_generated_artifact_registry_list_packages_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient", + "shortName": "ArtifactRegistryAsyncClient" + }, + "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient.list_python_packages", + "method": { + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.ListPythonPackages", + "service": { + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry", + "shortName": "ArtifactRegistry" + }, + "shortName": "ListPythonPackages" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.artifactregistry_v1.types.ListPythonPackagesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.artifactregistry_v1.services.artifact_registry.pagers.ListPythonPackagesAsyncPager", + "shortName": "list_python_packages" + }, + "description": "Sample for ListPythonPackages", + "file": "artifactregistry_v1_generated_artifact_registry_list_python_packages_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_ListPythonPackages_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"artifactregistry_v1_generated_artifact_registry_list_python_packages_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient", + "shortName": "ArtifactRegistryClient" + }, + "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient.list_python_packages", + "method": { + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.ListPythonPackages", + "service": { + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry", + "shortName": "ArtifactRegistry" + }, + "shortName": "ListPythonPackages" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.artifactregistry_v1.types.ListPythonPackagesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.artifactregistry_v1.services.artifact_registry.pagers.ListPythonPackagesPager", + "shortName": "list_python_packages" + }, + "description": "Sample for ListPythonPackages", + "file": "artifactregistry_v1_generated_artifact_registry_list_python_packages_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_ListPythonPackages_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "artifactregistry_v1_generated_artifact_registry_list_python_packages_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + 
"fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient", + "shortName": "ArtifactRegistryAsyncClient" + }, + "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient.list_repositories", + "method": { + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.ListRepositories", + "service": { + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry", + "shortName": "ArtifactRegistry" + }, + "shortName": "ListRepositories" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.artifactregistry_v1.types.ListRepositoriesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.artifactregistry_v1.services.artifact_registry.pagers.ListRepositoriesAsyncPager", + "shortName": "list_repositories" + }, + "description": "Sample for ListRepositories", + "file": "artifactregistry_v1_generated_artifact_registry_list_repositories_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_ListRepositories_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "artifactregistry_v1_generated_artifact_registry_list_repositories_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient", + "shortName": "ArtifactRegistryClient" + }, + "fullName": 
"google.cloud.artifactregistry_v1.ArtifactRegistryClient.list_repositories", + "method": { + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.ListRepositories", + "service": { + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry", + "shortName": "ArtifactRegistry" + }, + "shortName": "ListRepositories" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.artifactregistry_v1.types.ListRepositoriesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.artifactregistry_v1.services.artifact_registry.pagers.ListRepositoriesPager", + "shortName": "list_repositories" + }, + "description": "Sample for ListRepositories", + "file": "artifactregistry_v1_generated_artifact_registry_list_repositories_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_ListRepositories_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "artifactregistry_v1_generated_artifact_registry_list_repositories_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient", + "shortName": "ArtifactRegistryAsyncClient" + }, + "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient.list_rules", + "method": { + "fullName": 
"google.devtools.artifactregistry.v1.ArtifactRegistry.ListRules", + "service": { + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry", + "shortName": "ArtifactRegistry" + }, + "shortName": "ListRules" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.artifactregistry_v1.types.ListRulesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.artifactregistry_v1.services.artifact_registry.pagers.ListRulesAsyncPager", + "shortName": "list_rules" + }, + "description": "Sample for ListRules", + "file": "artifactregistry_v1_generated_artifact_registry_list_rules_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_ListRules_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "artifactregistry_v1_generated_artifact_registry_list_rules_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient", + "shortName": "ArtifactRegistryClient" + }, + "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient.list_rules", + "method": { + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.ListRules", + "service": { + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry", + "shortName": "ArtifactRegistry" + }, + "shortName": "ListRules" + }, + "parameters": [ + { + 
"name": "request", + "type": "google.cloud.artifactregistry_v1.types.ListRulesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.artifactregistry_v1.services.artifact_registry.pagers.ListRulesPager", + "shortName": "list_rules" + }, + "description": "Sample for ListRules", + "file": "artifactregistry_v1_generated_artifact_registry_list_rules_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_ListRules_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "artifactregistry_v1_generated_artifact_registry_list_rules_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient", + "shortName": "ArtifactRegistryAsyncClient" + }, + "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient.list_tags", + "method": { + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.ListTags", + "service": { + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry", + "shortName": "ArtifactRegistry" + }, + "shortName": "ListTags" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.artifactregistry_v1.types.ListTagsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + 
"type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.artifactregistry_v1.services.artifact_registry.pagers.ListTagsAsyncPager", + "shortName": "list_tags" + }, + "description": "Sample for ListTags", + "file": "artifactregistry_v1_generated_artifact_registry_list_tags_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_ListTags_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "artifactregistry_v1_generated_artifact_registry_list_tags_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient", + "shortName": "ArtifactRegistryClient" + }, + "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient.list_tags", + "method": { + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.ListTags", + "service": { + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry", + "shortName": "ArtifactRegistry" + }, + "shortName": "ListTags" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.artifactregistry_v1.types.ListTagsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.artifactregistry_v1.services.artifact_registry.pagers.ListTagsPager", + "shortName": "list_tags" + }, + "description": "Sample for 
ListTags", + "file": "artifactregistry_v1_generated_artifact_registry_list_tags_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_ListTags_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "artifactregistry_v1_generated_artifact_registry_list_tags_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient", + "shortName": "ArtifactRegistryAsyncClient" + }, + "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient.list_versions", + "method": { + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.ListVersions", + "service": { + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry", + "shortName": "ArtifactRegistry" + }, + "shortName": "ListVersions" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.artifactregistry_v1.types.ListVersionsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.artifactregistry_v1.services.artifact_registry.pagers.ListVersionsAsyncPager", + "shortName": "list_versions" + }, + "description": "Sample for ListVersions", + "file": "artifactregistry_v1_generated_artifact_registry_list_versions_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"artifactregistry_v1_generated_ArtifactRegistry_ListVersions_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "artifactregistry_v1_generated_artifact_registry_list_versions_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient", + "shortName": "ArtifactRegistryClient" + }, + "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient.list_versions", + "method": { + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.ListVersions", + "service": { + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry", + "shortName": "ArtifactRegistry" + }, + "shortName": "ListVersions" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.artifactregistry_v1.types.ListVersionsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.artifactregistry_v1.services.artifact_registry.pagers.ListVersionsPager", + "shortName": "list_versions" + }, + "description": "Sample for ListVersions", + "file": "artifactregistry_v1_generated_artifact_registry_list_versions_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_ListVersions_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + 
"type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "artifactregistry_v1_generated_artifact_registry_list_versions_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient", + "shortName": "ArtifactRegistryAsyncClient" + }, + "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient.set_iam_policy", + "method": { + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.SetIamPolicy", + "service": { + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry", + "shortName": "ArtifactRegistry" + }, + "shortName": "SetIamPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.iam.v1.iam_policy_pb2.SetIamPolicyRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.iam.v1.policy_pb2.Policy", + "shortName": "set_iam_policy" + }, + "description": "Sample for SetIamPolicy", + "file": "artifactregistry_v1_generated_artifact_registry_set_iam_policy_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_SetIamPolicy_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 41, + "start": 39, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 42, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"artifactregistry_v1_generated_artifact_registry_set_iam_policy_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient", + "shortName": "ArtifactRegistryClient" + }, + "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient.set_iam_policy", + "method": { + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.SetIamPolicy", + "service": { + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry", + "shortName": "ArtifactRegistry" + }, + "shortName": "SetIamPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.iam.v1.iam_policy_pb2.SetIamPolicyRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.iam.v1.policy_pb2.Policy", + "shortName": "set_iam_policy" + }, + "description": "Sample for SetIamPolicy", + "file": "artifactregistry_v1_generated_artifact_registry_set_iam_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_SetIamPolicy_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 41, + "start": 39, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 42, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "artifactregistry_v1_generated_artifact_registry_set_iam_policy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient", + "shortName": "ArtifactRegistryAsyncClient" + }, + "fullName": 
"google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient.test_iam_permissions", + "method": { + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.TestIamPermissions", + "service": { + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry", + "shortName": "ArtifactRegistry" + }, + "shortName": "TestIamPermissions" + }, + "parameters": [ + { + "name": "request", + "type": "google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse", + "shortName": "test_iam_permissions" + }, + "description": "Sample for TestIamPermissions", + "file": "artifactregistry_v1_generated_artifact_registry_test_iam_permissions_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_TestIamPermissions_async", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 41, + "start": 39, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 42, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "artifactregistry_v1_generated_artifact_registry_test_iam_permissions_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient", + "shortName": "ArtifactRegistryClient" + }, + "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient.test_iam_permissions", + "method": { + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.TestIamPermissions", + "service": { + "fullName": 
"google.devtools.artifactregistry.v1.ArtifactRegistry", + "shortName": "ArtifactRegistry" + }, + "shortName": "TestIamPermissions" + }, + "parameters": [ + { + "name": "request", + "type": "google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse", + "shortName": "test_iam_permissions" + }, + "description": "Sample for TestIamPermissions", + "file": "artifactregistry_v1_generated_artifact_registry_test_iam_permissions_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_TestIamPermissions_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 41, + "start": 39, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 42, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "artifactregistry_v1_generated_artifact_registry_test_iam_permissions_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient", + "shortName": "ArtifactRegistryAsyncClient" + }, + "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient.update_file", + "method": { + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.UpdateFile", + "service": { + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry", + "shortName": "ArtifactRegistry" + }, + "shortName": "UpdateFile" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.artifactregistry_v1.types.UpdateFileRequest" 
+ }, + { + "name": "file", + "type": "google.cloud.artifactregistry_v1.types.File" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.artifactregistry_v1.types.File", + "shortName": "update_file" + }, + "description": "Sample for UpdateFile", + "file": "artifactregistry_v1_generated_artifact_registry_update_file_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_UpdateFile_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "artifactregistry_v1_generated_artifact_registry_update_file_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient", + "shortName": "ArtifactRegistryClient" + }, + "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient.update_file", + "method": { + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.UpdateFile", + "service": { + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry", + "shortName": "ArtifactRegistry" + }, + "shortName": "UpdateFile" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.artifactregistry_v1.types.UpdateFileRequest" + }, + { + "name": "file", + "type": "google.cloud.artifactregistry_v1.types.File" + }, + { + "name": "update_mask", + "type": 
"google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.artifactregistry_v1.types.File", + "shortName": "update_file" + }, + "description": "Sample for UpdateFile", + "file": "artifactregistry_v1_generated_artifact_registry_update_file_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_UpdateFile_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "artifactregistry_v1_generated_artifact_registry_update_file_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient", + "shortName": "ArtifactRegistryAsyncClient" + }, + "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient.update_package", + "method": { + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.UpdatePackage", + "service": { + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry", + "shortName": "ArtifactRegistry" + }, + "shortName": "UpdatePackage" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.artifactregistry_v1.types.UpdatePackageRequest" + }, + { + "name": "package", + "type": "google.cloud.artifactregistry_v1.types.Package" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + 
}, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.artifactregistry_v1.types.Package", + "shortName": "update_package" + }, + "description": "Sample for UpdatePackage", + "file": "artifactregistry_v1_generated_artifact_registry_update_package_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_UpdatePackage_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "artifactregistry_v1_generated_artifact_registry_update_package_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient", + "shortName": "ArtifactRegistryClient" + }, + "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient.update_package", + "method": { + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.UpdatePackage", + "service": { + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry", + "shortName": "ArtifactRegistry" + }, + "shortName": "UpdatePackage" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.artifactregistry_v1.types.UpdatePackageRequest" + }, + { + "name": "package", + "type": "google.cloud.artifactregistry_v1.types.Package" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + 
"resultType": "google.cloud.artifactregistry_v1.types.Package", + "shortName": "update_package" + }, + "description": "Sample for UpdatePackage", + "file": "artifactregistry_v1_generated_artifact_registry_update_package_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_UpdatePackage_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "artifactregistry_v1_generated_artifact_registry_update_package_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient", + "shortName": "ArtifactRegistryAsyncClient" + }, + "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient.update_project_settings", + "method": { + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.UpdateProjectSettings", + "service": { + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry", + "shortName": "ArtifactRegistry" + }, + "shortName": "UpdateProjectSettings" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.artifactregistry_v1.types.UpdateProjectSettingsRequest" + }, + { + "name": "project_settings", + "type": "google.cloud.artifactregistry_v1.types.ProjectSettings" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.cloud.artifactregistry_v1.types.ProjectSettings", + "shortName": "update_project_settings" + }, + "description": "Sample for UpdateProjectSettings", + "file": "artifactregistry_v1_generated_artifact_registry_update_project_settings_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_UpdateProjectSettings_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "artifactregistry_v1_generated_artifact_registry_update_project_settings_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient", + "shortName": "ArtifactRegistryClient" + }, + "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient.update_project_settings", + "method": { + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.UpdateProjectSettings", + "service": { + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry", + "shortName": "ArtifactRegistry" + }, + "shortName": "UpdateProjectSettings" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.artifactregistry_v1.types.UpdateProjectSettingsRequest" + }, + { + "name": "project_settings", + "type": "google.cloud.artifactregistry_v1.types.ProjectSettings" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.cloud.artifactregistry_v1.types.ProjectSettings", + "shortName": "update_project_settings" + }, + "description": "Sample for UpdateProjectSettings", + "file": "artifactregistry_v1_generated_artifact_registry_update_project_settings_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_UpdateProjectSettings_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 46, - "start": 42, + "end": 44, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 47, + "end": 47, + "start": 45, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 50, + "end": 51, + "start": 48, "type": "RESPONSE_HANDLING" } ], - "title": "artifactregistry_v1_generated_artifact_registry_set_iam_policy_sync.py" + "title": "artifactregistry_v1_generated_artifact_registry_update_project_settings_sync.py" }, { "canonical": true, @@ -5012,19 +7147,27 @@ "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient", "shortName": "ArtifactRegistryAsyncClient" }, - "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient.test_iam_permissions", + "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient.update_repository", "method": { - "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.TestIamPermissions", + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.UpdateRepository", "service": { "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry", "shortName": "ArtifactRegistry" }, - "shortName": "TestIamPermissions" + "shortName": "UpdateRepository" }, "parameters": [ { "name": "request", - "type": "google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest" + "type": "google.cloud.artifactregistry_v1.types.UpdateRepositoryRequest" + }, + { + "name": "repository", + 
"type": "google.cloud.artifactregistry_v1.types.Repository" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" }, { "name": "retry", @@ -5039,47 +7182,47 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse", - "shortName": "test_iam_permissions" + "resultType": "google.cloud.artifactregistry_v1.types.Repository", + "shortName": "update_repository" }, - "description": "Sample for TestIamPermissions", - "file": "artifactregistry_v1_generated_artifact_registry_test_iam_permissions_async.py", + "description": "Sample for UpdateRepository", + "file": "artifactregistry_v1_generated_artifact_registry_update_repository_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_TestIamPermissions_async", + "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_UpdateRepository_async", "segments": [ { - "end": 53, + "end": 50, "start": 27, "type": "FULL" }, { - "end": 53, + "end": 50, "start": 27, "type": "SHORT" }, { - "end": 41, - "start": 39, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 47, - "start": 42, + "end": 44, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 50, - "start": 48, + "end": 47, + "start": 45, "type": "REQUEST_EXECUTION" }, { - "end": 54, - "start": 51, + "end": 51, + "start": 48, "type": "RESPONSE_HANDLING" } ], - "title": "artifactregistry_v1_generated_artifact_registry_test_iam_permissions_async.py" + "title": "artifactregistry_v1_generated_artifact_registry_update_repository_async.py" }, { "canonical": true, @@ -5088,19 +7231,27 @@ "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient", "shortName": "ArtifactRegistryClient" }, - "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient.test_iam_permissions", + "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient.update_repository", "method": { - 
"fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.TestIamPermissions", + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.UpdateRepository", "service": { "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry", "shortName": "ArtifactRegistry" }, - "shortName": "TestIamPermissions" + "shortName": "UpdateRepository" }, "parameters": [ { "name": "request", - "type": "google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest" + "type": "google.cloud.artifactregistry_v1.types.UpdateRepositoryRequest" + }, + { + "name": "repository", + "type": "google.cloud.artifactregistry_v1.types.Repository" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" }, { "name": "retry", @@ -5115,47 +7266,47 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse", - "shortName": "test_iam_permissions" + "resultType": "google.cloud.artifactregistry_v1.types.Repository", + "shortName": "update_repository" }, - "description": "Sample for TestIamPermissions", - "file": "artifactregistry_v1_generated_artifact_registry_test_iam_permissions_sync.py", + "description": "Sample for UpdateRepository", + "file": "artifactregistry_v1_generated_artifact_registry_update_repository_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_TestIamPermissions_sync", + "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_UpdateRepository_sync", "segments": [ { - "end": 53, + "end": 50, "start": 27, "type": "FULL" }, { - "end": 53, + "end": 50, "start": 27, "type": "SHORT" }, { - "end": 41, - "start": 39, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 47, - "start": 42, + "end": 44, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 50, - "start": 48, + "end": 47, + "start": 45, "type": "REQUEST_EXECUTION" }, { - "end": 54, - "start": 51, + "end": 51, + "start": 48, 
"type": "RESPONSE_HANDLING" } ], - "title": "artifactregistry_v1_generated_artifact_registry_test_iam_permissions_sync.py" + "title": "artifactregistry_v1_generated_artifact_registry_update_repository_sync.py" }, { "canonical": true, @@ -5165,23 +7316,23 @@ "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient", "shortName": "ArtifactRegistryAsyncClient" }, - "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient.update_project_settings", + "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient.update_rule", "method": { - "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.UpdateProjectSettings", + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.UpdateRule", "service": { "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry", "shortName": "ArtifactRegistry" }, - "shortName": "UpdateProjectSettings" + "shortName": "UpdateRule" }, "parameters": [ { "name": "request", - "type": "google.cloud.artifactregistry_v1.types.UpdateProjectSettingsRequest" + "type": "google.cloud.artifactregistry_v1.types.UpdateRuleRequest" }, { - "name": "project_settings", - "type": "google.cloud.artifactregistry_v1.types.ProjectSettings" + "name": "rule", + "type": "google.cloud.artifactregistry_v1.types.Rule" }, { "name": "update_mask", @@ -5200,14 +7351,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.artifactregistry_v1.types.ProjectSettings", - "shortName": "update_project_settings" + "resultType": "google.cloud.artifactregistry_v1.types.Rule", + "shortName": "update_rule" }, - "description": "Sample for UpdateProjectSettings", - "file": "artifactregistry_v1_generated_artifact_registry_update_project_settings_async.py", + "description": "Sample for UpdateRule", + "file": "artifactregistry_v1_generated_artifact_registry_update_rule_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": 
"artifactregistry_v1_generated_ArtifactRegistry_UpdateProjectSettings_async", + "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_UpdateRule_async", "segments": [ { "end": 50, @@ -5240,7 +7391,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "artifactregistry_v1_generated_artifact_registry_update_project_settings_async.py" + "title": "artifactregistry_v1_generated_artifact_registry_update_rule_async.py" }, { "canonical": true, @@ -5249,23 +7400,23 @@ "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient", "shortName": "ArtifactRegistryClient" }, - "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient.update_project_settings", + "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient.update_rule", "method": { - "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.UpdateProjectSettings", + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.UpdateRule", "service": { "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry", "shortName": "ArtifactRegistry" }, - "shortName": "UpdateProjectSettings" + "shortName": "UpdateRule" }, "parameters": [ { "name": "request", - "type": "google.cloud.artifactregistry_v1.types.UpdateProjectSettingsRequest" + "type": "google.cloud.artifactregistry_v1.types.UpdateRuleRequest" }, { - "name": "project_settings", - "type": "google.cloud.artifactregistry_v1.types.ProjectSettings" + "name": "rule", + "type": "google.cloud.artifactregistry_v1.types.Rule" }, { "name": "update_mask", @@ -5284,14 +7435,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.artifactregistry_v1.types.ProjectSettings", - "shortName": "update_project_settings" + "resultType": "google.cloud.artifactregistry_v1.types.Rule", + "shortName": "update_rule" }, - "description": "Sample for UpdateProjectSettings", - "file": "artifactregistry_v1_generated_artifact_registry_update_project_settings_sync.py", + "description": "Sample for UpdateRule", + "file": 
"artifactregistry_v1_generated_artifact_registry_update_rule_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_UpdateProjectSettings_sync", + "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_UpdateRule_sync", "segments": [ { "end": 50, @@ -5324,7 +7475,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "artifactregistry_v1_generated_artifact_registry_update_project_settings_sync.py" + "title": "artifactregistry_v1_generated_artifact_registry_update_rule_sync.py" }, { "canonical": true, @@ -5334,23 +7485,23 @@ "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient", "shortName": "ArtifactRegistryAsyncClient" }, - "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient.update_repository", + "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient.update_tag", "method": { - "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.UpdateRepository", + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.UpdateTag", "service": { "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry", "shortName": "ArtifactRegistry" }, - "shortName": "UpdateRepository" + "shortName": "UpdateTag" }, "parameters": [ { "name": "request", - "type": "google.cloud.artifactregistry_v1.types.UpdateRepositoryRequest" + "type": "google.cloud.artifactregistry_v1.types.UpdateTagRequest" }, { - "name": "repository", - "type": "google.cloud.artifactregistry_v1.types.Repository" + "name": "tag", + "type": "google.cloud.artifactregistry_v1.types.Tag" }, { "name": "update_mask", @@ -5369,14 +7520,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.artifactregistry_v1.types.Repository", - "shortName": "update_repository" + "resultType": "google.cloud.artifactregistry_v1.types.Tag", + "shortName": "update_tag" }, - "description": "Sample for UpdateRepository", - "file": 
"artifactregistry_v1_generated_artifact_registry_update_repository_async.py", + "description": "Sample for UpdateTag", + "file": "artifactregistry_v1_generated_artifact_registry_update_tag_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_UpdateRepository_async", + "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_UpdateTag_async", "segments": [ { "end": 50, @@ -5409,7 +7560,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "artifactregistry_v1_generated_artifact_registry_update_repository_async.py" + "title": "artifactregistry_v1_generated_artifact_registry_update_tag_async.py" }, { "canonical": true, @@ -5418,23 +7569,23 @@ "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient", "shortName": "ArtifactRegistryClient" }, - "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient.update_repository", + "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient.update_tag", "method": { - "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.UpdateRepository", + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.UpdateTag", "service": { "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry", "shortName": "ArtifactRegistry" }, - "shortName": "UpdateRepository" + "shortName": "UpdateTag" }, "parameters": [ { "name": "request", - "type": "google.cloud.artifactregistry_v1.types.UpdateRepositoryRequest" + "type": "google.cloud.artifactregistry_v1.types.UpdateTagRequest" }, { - "name": "repository", - "type": "google.cloud.artifactregistry_v1.types.Repository" + "name": "tag", + "type": "google.cloud.artifactregistry_v1.types.Tag" }, { "name": "update_mask", @@ -5453,14 +7604,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.artifactregistry_v1.types.Repository", - "shortName": "update_repository" + "resultType": "google.cloud.artifactregistry_v1.types.Tag", + "shortName": "update_tag" }, - 
"description": "Sample for UpdateRepository", - "file": "artifactregistry_v1_generated_artifact_registry_update_repository_sync.py", + "description": "Sample for UpdateTag", + "file": "artifactregistry_v1_generated_artifact_registry_update_tag_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_UpdateRepository_sync", + "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_UpdateTag_sync", "segments": [ { "end": 50, @@ -5493,7 +7644,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "artifactregistry_v1_generated_artifact_registry_update_repository_sync.py" + "title": "artifactregistry_v1_generated_artifact_registry_update_tag_sync.py" }, { "canonical": true, @@ -5503,23 +7654,23 @@ "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient", "shortName": "ArtifactRegistryAsyncClient" }, - "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient.update_tag", + "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient.update_vpcsc_config", "method": { - "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.UpdateTag", + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.UpdateVPCSCConfig", "service": { "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry", "shortName": "ArtifactRegistry" }, - "shortName": "UpdateTag" + "shortName": "UpdateVPCSCConfig" }, "parameters": [ { "name": "request", - "type": "google.cloud.artifactregistry_v1.types.UpdateTagRequest" + "type": "google.cloud.artifactregistry_v1.types.UpdateVPCSCConfigRequest" }, { - "name": "tag", - "type": "google.cloud.artifactregistry_v1.types.Tag" + "name": "vpcsc_config", + "type": "google.cloud.artifactregistry_v1.types.VPCSCConfig" }, { "name": "update_mask", @@ -5538,14 +7689,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.artifactregistry_v1.types.Tag", - "shortName": "update_tag" + "resultType": 
"google.cloud.artifactregistry_v1.types.VPCSCConfig", + "shortName": "update_vpcsc_config" }, - "description": "Sample for UpdateTag", - "file": "artifactregistry_v1_generated_artifact_registry_update_tag_async.py", + "description": "Sample for UpdateVPCSCConfig", + "file": "artifactregistry_v1_generated_artifact_registry_update_vpcsc_config_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_UpdateTag_async", + "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_UpdateVPCSCConfig_async", "segments": [ { "end": 50, @@ -5578,7 +7729,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "artifactregistry_v1_generated_artifact_registry_update_tag_async.py" + "title": "artifactregistry_v1_generated_artifact_registry_update_vpcsc_config_async.py" }, { "canonical": true, @@ -5587,23 +7738,23 @@ "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient", "shortName": "ArtifactRegistryClient" }, - "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient.update_tag", + "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient.update_vpcsc_config", "method": { - "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.UpdateTag", + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.UpdateVPCSCConfig", "service": { "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry", "shortName": "ArtifactRegistry" }, - "shortName": "UpdateTag" + "shortName": "UpdateVPCSCConfig" }, "parameters": [ { "name": "request", - "type": "google.cloud.artifactregistry_v1.types.UpdateTagRequest" + "type": "google.cloud.artifactregistry_v1.types.UpdateVPCSCConfigRequest" }, { - "name": "tag", - "type": "google.cloud.artifactregistry_v1.types.Tag" + "name": "vpcsc_config", + "type": "google.cloud.artifactregistry_v1.types.VPCSCConfig" }, { "name": "update_mask", @@ -5622,14 +7773,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": 
"google.cloud.artifactregistry_v1.types.Tag", - "shortName": "update_tag" + "resultType": "google.cloud.artifactregistry_v1.types.VPCSCConfig", + "shortName": "update_vpcsc_config" }, - "description": "Sample for UpdateTag", - "file": "artifactregistry_v1_generated_artifact_registry_update_tag_sync.py", + "description": "Sample for UpdateVPCSCConfig", + "file": "artifactregistry_v1_generated_artifact_registry_update_vpcsc_config_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_UpdateTag_sync", + "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_UpdateVPCSCConfig_sync", "segments": [ { "end": 50, @@ -5662,7 +7813,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "artifactregistry_v1_generated_artifact_registry_update_tag_sync.py" + "title": "artifactregistry_v1_generated_artifact_registry_update_vpcsc_config_sync.py" }, { "canonical": true, @@ -5672,23 +7823,23 @@ "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient", "shortName": "ArtifactRegistryAsyncClient" }, - "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient.update_vpcsc_config", + "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryAsyncClient.update_version", "method": { - "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.UpdateVPCSCConfig", + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.UpdateVersion", "service": { "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry", "shortName": "ArtifactRegistry" }, - "shortName": "UpdateVPCSCConfig" + "shortName": "UpdateVersion" }, "parameters": [ { "name": "request", - "type": "google.cloud.artifactregistry_v1.types.UpdateVPCSCConfigRequest" + "type": "google.cloud.artifactregistry_v1.types.UpdateVersionRequest" }, { - "name": "vpcsc_config", - "type": "google.cloud.artifactregistry_v1.types.VPCSCConfig" + "name": "version", + "type": 
"google.cloud.artifactregistry_v1.types.Version" }, { "name": "update_mask", @@ -5707,14 +7858,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.artifactregistry_v1.types.VPCSCConfig", - "shortName": "update_vpcsc_config" + "resultType": "google.cloud.artifactregistry_v1.types.Version", + "shortName": "update_version" }, - "description": "Sample for UpdateVPCSCConfig", - "file": "artifactregistry_v1_generated_artifact_registry_update_vpcsc_config_async.py", + "description": "Sample for UpdateVersion", + "file": "artifactregistry_v1_generated_artifact_registry_update_version_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_UpdateVPCSCConfig_async", + "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_UpdateVersion_async", "segments": [ { "end": 50, @@ -5747,7 +7898,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "artifactregistry_v1_generated_artifact_registry_update_vpcsc_config_async.py" + "title": "artifactregistry_v1_generated_artifact_registry_update_version_async.py" }, { "canonical": true, @@ -5756,23 +7907,23 @@ "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient", "shortName": "ArtifactRegistryClient" }, - "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient.update_vpcsc_config", + "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient.update_version", "method": { - "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.UpdateVPCSCConfig", + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.UpdateVersion", "service": { "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry", "shortName": "ArtifactRegistry" }, - "shortName": "UpdateVPCSCConfig" + "shortName": "UpdateVersion" }, "parameters": [ { "name": "request", - "type": "google.cloud.artifactregistry_v1.types.UpdateVPCSCConfigRequest" + "type": "google.cloud.artifactregistry_v1.types.UpdateVersionRequest" }, { 
- "name": "vpcsc_config", - "type": "google.cloud.artifactregistry_v1.types.VPCSCConfig" + "name": "version", + "type": "google.cloud.artifactregistry_v1.types.Version" }, { "name": "update_mask", @@ -5791,14 +7942,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.artifactregistry_v1.types.VPCSCConfig", - "shortName": "update_vpcsc_config" + "resultType": "google.cloud.artifactregistry_v1.types.Version", + "shortName": "update_version" }, - "description": "Sample for UpdateVPCSCConfig", - "file": "artifactregistry_v1_generated_artifact_registry_update_vpcsc_config_sync.py", + "description": "Sample for UpdateVersion", + "file": "artifactregistry_v1_generated_artifact_registry_update_version_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_UpdateVPCSCConfig_sync", + "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_UpdateVersion_sync", "segments": [ { "end": 50, @@ -5831,7 +7982,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "artifactregistry_v1_generated_artifact_registry_update_vpcsc_config_sync.py" + "title": "artifactregistry_v1_generated_artifact_registry_update_version_sync.py" } ] } diff --git a/packages/google-cloud-artifact-registry/samples/generated_samples/snippet_metadata_google.devtools.artifactregistry.v1beta2.json b/packages/google-cloud-artifact-registry/samples/generated_samples/snippet_metadata_google.devtools.artifactregistry.v1beta2.json index 8f243dc2f395..191eb79f1ee3 100644 --- a/packages/google-cloud-artifact-registry/samples/generated_samples/snippet_metadata_google.devtools.artifactregistry.v1beta2.json +++ b/packages/google-cloud-artifact-registry/samples/generated_samples/snippet_metadata_google.devtools.artifactregistry.v1beta2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-artifact-registry", - "version": "1.12.0" + "version": "1.13.1" }, "snippets": [ { diff --git 
a/packages/google-cloud-artifact-registry/scripts/fixup_artifactregistry_v1_keywords.py b/packages/google-cloud-artifact-registry/scripts/fixup_artifactregistry_v1_keywords.py index 6316b6b72949..35798fee7dc3 100644 --- a/packages/google-cloud-artifact-registry/scripts/fixup_artifactregistry_v1_keywords.py +++ b/packages/google-cloud-artifact-registry/scripts/fixup_artifactregistry_v1_keywords.py @@ -40,12 +40,18 @@ class artifactregistryCallTransformer(cst.CSTTransformer): CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { 'batch_delete_versions': ('names', 'parent', 'validate_only', ), + 'create_attachment': ('parent', 'attachment_id', 'attachment', ), 'create_repository': ('parent', 'repository_id', 'repository', ), + 'create_rule': ('parent', 'rule_id', 'rule', ), 'create_tag': ('parent', 'tag_id', 'tag', ), + 'delete_attachment': ('name', ), + 'delete_file': ('name', ), 'delete_package': ('name', ), 'delete_repository': ('name', ), + 'delete_rule': ('name', ), 'delete_tag': ('name', ), 'delete_version': ('name', 'force', ), + 'get_attachment': ('name', ), 'get_docker_image': ('name', ), 'get_file': ('name', ), 'get_iam_policy': ('resource', 'options', ), @@ -55,25 +61,32 @@ class artifactregistryCallTransformer(cst.CSTTransformer): 'get_project_settings': ('name', ), 'get_python_package': ('name', ), 'get_repository': ('name', ), + 'get_rule': ('name', ), 'get_tag': ('name', ), 'get_version': ('name', 'view', ), 'get_vpcsc_config': ('name', ), 'import_apt_artifacts': ('gcs_source', 'parent', ), 'import_yum_artifacts': ('gcs_source', 'parent', ), + 'list_attachments': ('parent', 'filter', 'page_size', 'page_token', ), 'list_docker_images': ('parent', 'page_size', 'page_token', 'order_by', ), 'list_files': ('parent', 'filter', 'page_size', 'page_token', 'order_by', ), 'list_maven_artifacts': ('parent', 'page_size', 'page_token', ), 'list_npm_packages': ('parent', 'page_size', 'page_token', ), - 'list_packages': 
('parent', 'page_size', 'page_token', ), + 'list_packages': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), 'list_python_packages': ('parent', 'page_size', 'page_token', ), - 'list_repositories': ('parent', 'page_size', 'page_token', ), + 'list_repositories': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'list_rules': ('parent', 'page_size', 'page_token', ), 'list_tags': ('parent', 'filter', 'page_size', 'page_token', ), - 'list_versions': ('parent', 'page_size', 'page_token', 'view', 'order_by', ), + 'list_versions': ('parent', 'page_size', 'page_token', 'view', 'order_by', 'filter', ), 'set_iam_policy': ('resource', 'policy', 'update_mask', ), 'test_iam_permissions': ('resource', 'permissions', ), + 'update_file': ('file', 'update_mask', ), + 'update_package': ('package', 'update_mask', ), 'update_project_settings': ('project_settings', 'update_mask', ), 'update_repository': ('repository', 'update_mask', ), + 'update_rule': ('rule', 'update_mask', ), 'update_tag': ('tag', 'update_mask', ), + 'update_version': ('version', 'update_mask', ), 'update_vpcsc_config': ('vpcsc_config', 'update_mask', ), } diff --git a/packages/google-cloud-artifact-registry/tests/unit/gapic/artifactregistry_v1/test_artifact_registry.py b/packages/google-cloud-artifact-registry/tests/unit/gapic/artifactregistry_v1/test_artifact_registry.py index 2532d0579d76..579c52851bd6 100644 --- a/packages/google-cloud-artifact-registry/tests/unit/gapic/artifactregistry_v1/test_artifact_registry.py +++ b/packages/google-cloud-artifact-registry/tests/unit/gapic/artifactregistry_v1/test_artifact_registry.py @@ -78,14 +78,23 @@ pagers, transports, ) -from google.cloud.artifactregistry_v1.types import apt_artifact, artifact, file, package from google.cloud.artifactregistry_v1.types import vpcsc_config as gda_vpcsc_config +from google.cloud.artifactregistry_v1.types import apt_artifact, artifact +from google.cloud.artifactregistry_v1.types import attachment +from 
google.cloud.artifactregistry_v1.types import attachment as gda_attachment +from google.cloud.artifactregistry_v1.types import file +from google.cloud.artifactregistry_v1.types import file as gda_file +from google.cloud.artifactregistry_v1.types import package +from google.cloud.artifactregistry_v1.types import package as gda_package from google.cloud.artifactregistry_v1.types import repository from google.cloud.artifactregistry_v1.types import repository as gda_repository +from google.cloud.artifactregistry_v1.types import rule +from google.cloud.artifactregistry_v1.types import rule as gda_rule from google.cloud.artifactregistry_v1.types import service, settings from google.cloud.artifactregistry_v1.types import tag from google.cloud.artifactregistry_v1.types import tag as gda_tag from google.cloud.artifactregistry_v1.types import version +from google.cloud.artifactregistry_v1.types import version as gda_version from google.cloud.artifactregistry_v1.types import vpcsc_config from google.cloud.artifactregistry_v1.types import yum_artifact @@ -348,86 +357,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (ArtifactRegistryClient, transports.ArtifactRegistryGrpcTransport, "grpc"), - (ArtifactRegistryClient, transports.ArtifactRegistryRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. 
- channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ @@ -5334,6 +5263,8 @@ def test_list_repositories_non_empty_request_with_auto_populated_field(): request = repository.ListRepositoriesRequest( parent="parent_value", page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -5349,6 +5280,8 @@ def test_list_repositories_non_empty_request_with_auto_populated_field(): assert args[0] == repository.ListRepositoriesRequest( parent="parent_value", page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", ) @@ -5854,6 +5787,8 @@ def test_get_repository(request_type, transport: str = "grpc"): size_bytes=1089, satisfies_pzs=True, cleanup_policy_dry_run=True, + disallow_unspecified_mode=True, + satisfies_pzi=True, ) response = client.get_repository(request) @@ -5873,6 +5808,8 @@ def test_get_repository(request_type, transport: str = "grpc"): assert response.size_bytes == 1089 assert response.satisfies_pzs is True assert response.cleanup_policy_dry_run is True + assert response.disallow_unspecified_mode is True + assert response.satisfies_pzi is True def test_get_repository_non_empty_request_with_auto_populated_field(): @@ -6006,6 +5943,8 @@ async def test_get_repository_async( size_bytes=1089, satisfies_pzs=True, cleanup_policy_dry_run=True, + disallow_unspecified_mode=True, + satisfies_pzi=True, ) ) response = await client.get_repository(request) @@ -6026,6 +5965,8 @@ async def test_get_repository_async( assert response.size_bytes == 1089 assert response.satisfies_pzs is True assert response.cleanup_policy_dry_run is True + assert response.disallow_unspecified_mode is True + assert response.satisfies_pzi is True @pytest.mark.asyncio @@ -6596,6 +6537,8 @@ def test_update_repository(request_type, transport: str = "grpc"): size_bytes=1089, satisfies_pzs=True, cleanup_policy_dry_run=True, + disallow_unspecified_mode=True, + satisfies_pzi=True, ) response = client.update_repository(request) @@ -6615,6 +6558,8 @@ def test_update_repository(request_type, transport: str = "grpc"): assert response.size_bytes == 1089 assert response.satisfies_pzs is True assert response.cleanup_policy_dry_run is True + assert response.disallow_unspecified_mode is True + assert response.satisfies_pzi is True def 
test_update_repository_non_empty_request_with_auto_populated_field(): @@ -6750,6 +6695,8 @@ async def test_update_repository_async( size_bytes=1089, satisfies_pzs=True, cleanup_policy_dry_run=True, + disallow_unspecified_mode=True, + satisfies_pzi=True, ) ) response = await client.update_repository(request) @@ -6770,6 +6717,8 @@ async def test_update_repository_async( assert response.size_bytes == 1089 assert response.satisfies_pzs is True assert response.cleanup_policy_dry_run is True + assert response.disallow_unspecified_mode is True + assert response.satisfies_pzi is True @pytest.mark.asyncio @@ -7355,6 +7304,8 @@ def test_list_packages_non_empty_request_with_auto_populated_field(): request = package.ListPackagesRequest( parent="parent_value", page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -7368,6 +7319,8 @@ def test_list_packages_non_empty_request_with_auto_populated_field(): assert args[0] == package.ListPackagesRequest( parent="parent_value", page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", ) @@ -8525,6 +8478,7 @@ def test_list_versions_non_empty_request_with_auto_populated_field(): parent="parent_value", page_token="page_token_value", order_by="order_by_value", + filter="filter_value", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -8539,6 +8493,7 @@ def test_list_versions_non_empty_request_with_auto_populated_field(): parent="parent_value", page_token="page_token_value", order_by="order_by_value", + filter="filter_value", ) @@ -10001,6 +9956,335 @@ async def test_batch_delete_versions_flattened_error_async(): ) +@pytest.mark.parametrize( + "request_type", + [ + gda_version.UpdateVersionRequest, + dict, + ], +) +def test_update_version(request_type, transport: str = "grpc"): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_version), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gda_version.Version( + name="name_value", + description="description_value", + ) + response = client.update_version(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = gda_version.UpdateVersionRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, gda_version.Version) + assert response.name == "name_value" + assert response.description == "description_value" + + +def test_update_version_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = gda_version.UpdateVersionRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_version), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.update_version(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == gda_version.UpdateVersionRequest() + + +def test_update_version_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_version in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_version] = mock_rpc + request = {} + client.update_version(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.update_version(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_version_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_version + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.update_version + ] = mock_rpc + + request = {} + await client.update_version(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.update_version(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_version_async( + transport: str = "grpc_asyncio", request_type=gda_version.UpdateVersionRequest +): + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.update_version), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gda_version.Version( + name="name_value", + description="description_value", + ) + ) + response = await client.update_version(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = gda_version.UpdateVersionRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, gda_version.Version) + assert response.name == "name_value" + assert response.description == "description_value" + + +@pytest.mark.asyncio +async def test_update_version_async_from_dict(): + await test_update_version_async(request_type=dict) + + +def test_update_version_field_headers(): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gda_version.UpdateVersionRequest() + + request.version.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_version), "__call__") as call: + call.return_value = gda_version.Version() + client.update_version(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "version.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_version_field_headers_async(): + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gda_version.UpdateVersionRequest() + + request.version.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_version), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gda_version.Version()) + await client.update_version(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "version.name=name_value", + ) in kw["metadata"] + + +def test_update_version_flattened(): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_version), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gda_version.Version() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_version( + version=gda_version.Version(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].version + mock_val = gda_version.Version(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_version_flattened_error(): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_version( + gda_version.UpdateVersionRequest(), + version=gda_version.Version(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_version_flattened_async(): + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_version), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gda_version.Version() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gda_version.Version()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_version( + version=gda_version.Version(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].version + mock_val = gda_version.Version(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_version_flattened_error_async(): + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_version( + gda_version.UpdateVersionRequest(), + version=gda_version.Version(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + @pytest.mark.parametrize( "request_type", [ @@ -10850,11 +11134,11 @@ async def test_get_file_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - tag.ListTagsRequest, + file.DeleteFileRequest, dict, ], ) -def test_list_tags(request_type, transport: str = "grpc"): +def test_delete_file(request_type, transport: str = "grpc"): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -10865,25 +11149,22 @@ def test_list_tags(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_tags), "__call__") as call: + with mock.patch.object(type(client.transport.delete_file), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = tag.ListTagsResponse( - next_page_token="next_page_token_value", - ) - response = client.list_tags(request) + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_file(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = tag.ListTagsRequest() + request = file.DeleteFileRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListTagsPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, future.Future) -def test_list_tags_non_empty_request_with_auto_populated_field(): +def test_delete_file_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = ArtifactRegistryClient( @@ -10894,28 +11175,24 @@ def test_list_tags_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = tag.ListTagsRequest( - parent="parent_value", - filter="filter_value", - page_token="page_token_value", + request = file.DeleteFileRequest( + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_tags), "__call__") as call: + with mock.patch.object(type(client.transport.delete_file), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.list_tags(request=request) + client.delete_file(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == tag.ListTagsRequest( - parent="parent_value", - filter="filter_value", - page_token="page_token_value", + assert args[0] == file.DeleteFileRequest( + name="name_value", ) -def test_list_tags_use_cached_wrapped_rpc(): +def test_delete_file_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -10929,21 +11206,26 @@ def test_list_tags_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_tags in client._transport._wrapped_methods + assert client._transport.delete_file in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.list_tags] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_file] = mock_rpc request = {} - client.list_tags(request) + client.delete_file(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_tags(request) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_file(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -10951,7 +11233,9 @@ def test_list_tags_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_list_tags_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_delete_file_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -10966,7 +11250,7 @@ async def test_list_tags_async_use_cached_wrapped_rpc(transport: str = "grpc_asy # Ensure method has been cached assert ( - client._client._transport.list_tags + client._client._transport.delete_file in client._client._transport._wrapped_methods ) @@ -10974,16 +11258,21 @@ async def test_list_tags_async_use_cached_wrapped_rpc(transport: str = "grpc_asy mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.list_tags + client._client._transport.delete_file ] = mock_rpc request = {} - await client.list_tags(request) + await client.delete_file(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - await client.list_tags(request) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_file(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -10991,8 +11280,8 @@ async def test_list_tags_async_use_cached_wrapped_rpc(transport: str = "grpc_asy @pytest.mark.asyncio -async def test_list_tags_async( - transport: str = "grpc_asyncio", request_type=tag.ListTagsRequest +async def test_delete_file_async( + transport: str = "grpc_asyncio", request_type=file.DeleteFileRequest ): client = ArtifactRegistryAsyncClient( credentials=async_anonymous_credentials(), @@ -11004,46 +11293,43 @@ async def test_list_tags_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_tags), "__call__") as call: + with mock.patch.object(type(client.transport.delete_file), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - tag.ListTagsResponse( - next_page_token="next_page_token_value", - ) + operations_pb2.Operation(name="operations/spam") ) - response = await client.list_tags(request) + response = await client.delete_file(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = tag.ListTagsRequest() + request = file.DeleteFileRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListTagsAsyncPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, future.Future) @pytest.mark.asyncio -async def test_list_tags_async_from_dict(): - await test_list_tags_async(request_type=dict) +async def test_delete_file_async_from_dict(): + await test_delete_file_async(request_type=dict) -def test_list_tags_field_headers(): +def test_delete_file_field_headers(): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = tag.ListTagsRequest() + request = file.DeleteFileRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_tags), "__call__") as call: - call.return_value = tag.ListTagsResponse() - client.list_tags(request) + with mock.patch.object(type(client.transport.delete_file), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_file(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -11054,28 +11340,28 @@ def test_list_tags_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_list_tags_field_headers_async(): +async def test_delete_file_field_headers_async(): client = ArtifactRegistryAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = tag.ListTagsRequest() + request = file.DeleteFileRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_tags), "__call__") as call: + with mock.patch.object(type(client.transport.delete_file), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - tag.ListTagsResponse() + operations_pb2.Operation(name="operations/op") ) - await client.list_tags(request) + await client.delete_file(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -11086,35 +11372,35 @@ async def test_list_tags_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] -def test_list_tags_flattened(): +def test_delete_file_flattened(): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_tags), "__call__") as call: + with mock.patch.object(type(client.transport.delete_file), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = tag.ListTagsResponse() + call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_tags( - parent="parent_value", + client.delete_file( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val -def test_list_tags_flattened_error(): +def test_delete_file_flattened_error(): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -11122,43 +11408,43 @@ def test_list_tags_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_tags( - tag.ListTagsRequest(), - parent="parent_value", + client.delete_file( + file.DeleteFileRequest(), + name="name_value", ) @pytest.mark.asyncio -async def test_list_tags_flattened_async(): +async def test_delete_file_flattened_async(): client = ArtifactRegistryAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_tags), "__call__") as call: + with mock.patch.object(type(client.transport.delete_file), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = tag.ListTagsResponse() + call.return_value = operations_pb2.Operation(name="operations/op") call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - tag.ListTagsResponse() + operations_pb2.Operation(name="operations/spam") ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_tags( - parent="parent_value", + response = await client.delete_file( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio -async def test_list_tags_flattened_error_async(): +async def test_delete_file_flattened_error_async(): client = ArtifactRegistryAsyncClient( credentials=async_anonymous_credentials(), ) @@ -11166,214 +11452,20 @@ async def test_list_tags_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.list_tags( - tag.ListTagsRequest(), - parent="parent_value", - ) - - -def test_list_tags_pager(transport_name: str = "grpc"): - client = ArtifactRegistryClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_tags), "__call__") as call: - # Set the response to a series of pages. 
- call.side_effect = ( - tag.ListTagsResponse( - tags=[ - tag.Tag(), - tag.Tag(), - tag.Tag(), - ], - next_page_token="abc", - ), - tag.ListTagsResponse( - tags=[], - next_page_token="def", - ), - tag.ListTagsResponse( - tags=[ - tag.Tag(), - ], - next_page_token="ghi", - ), - tag.ListTagsResponse( - tags=[ - tag.Tag(), - tag.Tag(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), - ) - pager = client.list_tags(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, tag.Tag) for i in results) - - -def test_list_tags_pages(transport_name: str = "grpc"): - client = ArtifactRegistryClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_tags), "__call__") as call: - # Set the response to a series of pages. - call.side_effect = ( - tag.ListTagsResponse( - tags=[ - tag.Tag(), - tag.Tag(), - tag.Tag(), - ], - next_page_token="abc", - ), - tag.ListTagsResponse( - tags=[], - next_page_token="def", - ), - tag.ListTagsResponse( - tags=[ - tag.Tag(), - ], - next_page_token="ghi", - ), - tag.ListTagsResponse( - tags=[ - tag.Tag(), - tag.Tag(), - ], - ), - RuntimeError, - ) - pages = list(client.list_tags(request={}).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.asyncio -async def test_list_tags_async_pager(): - client = ArtifactRegistryAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_tags), "__call__", new_callable=mock.AsyncMock - ) as call: - # Set the response to a series of pages. - call.side_effect = ( - tag.ListTagsResponse( - tags=[ - tag.Tag(), - tag.Tag(), - tag.Tag(), - ], - next_page_token="abc", - ), - tag.ListTagsResponse( - tags=[], - next_page_token="def", - ), - tag.ListTagsResponse( - tags=[ - tag.Tag(), - ], - next_page_token="ghi", - ), - tag.ListTagsResponse( - tags=[ - tag.Tag(), - tag.Tag(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_tags( - request={}, - ) - assert async_pager.next_page_token == "abc" - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, tag.Tag) for i in responses) - - -@pytest.mark.asyncio -async def test_list_tags_async_pages(): - client = ArtifactRegistryAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_tags), "__call__", new_callable=mock.AsyncMock - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - tag.ListTagsResponse( - tags=[ - tag.Tag(), - tag.Tag(), - tag.Tag(), - ], - next_page_token="abc", - ), - tag.ListTagsResponse( - tags=[], - next_page_token="def", - ), - tag.ListTagsResponse( - tags=[ - tag.Tag(), - ], - next_page_token="ghi", - ), - tag.ListTagsResponse( - tags=[ - tag.Tag(), - tag.Tag(), - ], - ), - RuntimeError, + await client.delete_file( + file.DeleteFileRequest(), + name="name_value", ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_tags(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token @pytest.mark.parametrize( "request_type", [ - tag.GetTagRequest, + gda_file.UpdateFileRequest, dict, ], ) -def test_get_tag(request_type, transport: str = "grpc"): +def test_update_file(request_type, transport: str = "grpc"): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -11384,27 +11476,29 @@ def test_get_tag(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_tag), "__call__") as call: + with mock.patch.object(type(client.transport.update_file), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = tag.Tag( + call.return_value = gda_file.File( name="name_value", - version="version_value", + size_bytes=1089, + owner="owner_value", ) - response = client.get_tag(request) + response = client.update_file(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = tag.GetTagRequest() + request = gda_file.UpdateFileRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, tag.Tag) + assert isinstance(response, gda_file.File) assert response.name == "name_value" - assert response.version == "version_value" + assert response.size_bytes == 1089 + assert response.owner == "owner_value" -def test_get_tag_non_empty_request_with_auto_populated_field(): +def test_update_file_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = ArtifactRegistryClient( @@ -11415,24 +11509,20 @@ def test_get_tag_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = tag.GetTagRequest( - name="name_value", - ) + request = gda_file.UpdateFileRequest() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_tag), "__call__") as call: + with mock.patch.object(type(client.transport.update_file), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.get_tag(request=request) + client.update_file(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == tag.GetTagRequest( - name="name_value", - ) + assert args[0] == gda_file.UpdateFileRequest() -def test_get_tag_use_cached_wrapped_rpc(): +def test_update_file_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -11446,21 +11536,21 @@ def test_get_tag_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_tag in client._transport._wrapped_methods + assert client._transport.update_file in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_tag] = mock_rpc + client._transport._wrapped_methods[client._transport.update_file] = mock_rpc request = {} - client.get_tag(request) + client.update_file(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_tag(request) + client.update_file(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -11468,7 +11558,9 @@ def test_get_tag_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_get_tag_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_update_file_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -11483,7 +11575,7 @@ async def test_get_tag_async_use_cached_wrapped_rpc(transport: str = "grpc_async # Ensure method has been cached assert ( - client._client._transport.get_tag + client._client._transport.update_file in client._client._transport._wrapped_methods ) @@ -11491,16 +11583,16 @@ async def test_get_tag_async_use_cached_wrapped_rpc(transport: str = "grpc_async mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.get_tag + client._client._transport.update_file ] = mock_rpc request = {} - await client.get_tag(request) + await client.update_file(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - await client.get_tag(request) + await client.update_file(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -11508,8 +11600,8 @@ async def test_get_tag_async_use_cached_wrapped_rpc(transport: str = "grpc_async @pytest.mark.asyncio -async def test_get_tag_async( - transport: str = "grpc_asyncio", request_type=tag.GetTagRequest +async def test_update_file_async( + transport: str = "grpc_asyncio", request_type=gda_file.UpdateFileRequest ): client = ArtifactRegistryAsyncClient( credentials=async_anonymous_credentials(), @@ -11521,48 +11613,50 @@ async def test_get_tag_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_tag), "__call__") as call: + with mock.patch.object(type(client.transport.update_file), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - tag.Tag( + gda_file.File( name="name_value", - version="version_value", + size_bytes=1089, + owner="owner_value", ) ) - response = await client.get_tag(request) + response = await client.update_file(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = tag.GetTagRequest() + request = gda_file.UpdateFileRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, tag.Tag) + assert isinstance(response, gda_file.File) assert response.name == "name_value" - assert response.version == "version_value" + assert response.size_bytes == 1089 + assert response.owner == "owner_value" @pytest.mark.asyncio -async def test_get_tag_async_from_dict(): - await test_get_tag_async(request_type=dict) +async def test_update_file_async_from_dict(): + await test_update_file_async(request_type=dict) -def test_get_tag_field_headers(): +def test_update_file_field_headers(): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = tag.GetTagRequest() + request = gda_file.UpdateFileRequest() - request.name = "name_value" + request.file.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_tag), "__call__") as call: - call.return_value = tag.Tag() - client.get_tag(request) + with mock.patch.object(type(client.transport.update_file), "__call__") as call: + call.return_value = gda_file.File() + client.update_file(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -11573,26 +11667,26 @@ def test_get_tag_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "file.name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_get_tag_field_headers_async(): +async def test_update_file_field_headers_async(): client = ArtifactRegistryAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = tag.GetTagRequest() + request = gda_file.UpdateFileRequest() - request.name = "name_value" + request.file.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_tag), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tag.Tag()) - await client.get_tag(request) + with mock.patch.object(type(client.transport.update_file), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gda_file.File()) + await client.update_file(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -11603,35 +11697,39 @@ async def test_get_tag_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "file.name=name_value", ) in kw["metadata"] -def test_get_tag_flattened(): +def test_update_file_flattened(): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_tag), "__call__") as call: + with mock.patch.object(type(client.transport.update_file), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = tag.Tag() + call.return_value = gda_file.File() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_tag( - name="name_value", + client.update_file( + file=gda_file.File(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].file + mock_val = gda_file.File(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val -def test_get_tag_flattened_error(): +def test_update_file_flattened_error(): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -11639,41 +11737,46 @@ def test_get_tag_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_tag( - tag.GetTagRequest(), - name="name_value", + client.update_file( + gda_file.UpdateFileRequest(), + file=gda_file.File(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio -async def test_get_tag_flattened_async(): +async def test_update_file_flattened_async(): client = ArtifactRegistryAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_tag), "__call__") as call: + with mock.patch.object(type(client.transport.update_file), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = tag.Tag() + call.return_value = gda_file.File() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tag.Tag()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gda_file.File()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_tag( - name="name_value", + response = await client.update_file( + file=gda_file.File(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].file + mock_val = gda_file.File(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val @pytest.mark.asyncio -async def test_get_tag_flattened_error_async(): +async def test_update_file_flattened_error_async(): client = ArtifactRegistryAsyncClient( credentials=async_anonymous_credentials(), ) @@ -11681,20 +11784,21 @@ async def test_get_tag_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.get_tag( - tag.GetTagRequest(), - name="name_value", + await client.update_file( + gda_file.UpdateFileRequest(), + file=gda_file.File(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.parametrize( "request_type", [ - gda_tag.CreateTagRequest, + tag.ListTagsRequest, dict, ], ) -def test_create_tag(request_type, transport: str = "grpc"): +def test_list_tags(request_type, transport: str = "grpc"): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -11705,27 +11809,25 @@ def test_create_tag(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_tag), "__call__") as call: + with mock.patch.object(type(client.transport.list_tags), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = gda_tag.Tag( - name="name_value", - version="version_value", + call.return_value = tag.ListTagsResponse( + next_page_token="next_page_token_value", ) - response = client.create_tag(request) + response = client.list_tags(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = gda_tag.CreateTagRequest() + request = tag.ListTagsRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, gda_tag.Tag) - assert response.name == "name_value" - assert response.version == "version_value" + assert isinstance(response, pagers.ListTagsPager) + assert response.next_page_token == "next_page_token_value" -def test_create_tag_non_empty_request_with_auto_populated_field(): +def test_list_tags_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = ArtifactRegistryClient( @@ -11736,26 +11838,28 @@ def test_create_tag_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = gda_tag.CreateTagRequest( + request = tag.ListTagsRequest( parent="parent_value", - tag_id="tag_id_value", + filter="filter_value", + page_token="page_token_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_tag), "__call__") as call: + with mock.patch.object(type(client.transport.list_tags), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.create_tag(request=request) + client.list_tags(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == gda_tag.CreateTagRequest( + assert args[0] == tag.ListTagsRequest( parent="parent_value", - tag_id="tag_id_value", + filter="filter_value", + page_token="page_token_value", ) -def test_create_tag_use_cached_wrapped_rpc(): +def test_list_tags_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -11769,21 +11873,21 @@ def test_create_tag_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.create_tag in client._transport._wrapped_methods + assert client._transport.list_tags in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.create_tag] = mock_rpc + client._transport._wrapped_methods[client._transport.list_tags] = mock_rpc request = {} - client.create_tag(request) + client.list_tags(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.create_tag(request) + client.list_tags(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -11791,7 +11895,7 @@ def test_create_tag_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_create_tag_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_list_tags_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -11806,7 +11910,7 @@ async def test_create_tag_async_use_cached_wrapped_rpc(transport: str = "grpc_as # Ensure method has been cached assert ( - client._client._transport.create_tag + client._client._transport.list_tags in client._client._transport._wrapped_methods ) @@ -11814,16 +11918,16 @@ async def test_create_tag_async_use_cached_wrapped_rpc(transport: str = "grpc_as mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.create_tag + client._client._transport.list_tags ] = mock_rpc request = {} - await client.create_tag(request) + await client.list_tags(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - await client.create_tag(request) + await client.list_tags(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -11831,8 +11935,8 @@ async def test_create_tag_async_use_cached_wrapped_rpc(transport: str = "grpc_as @pytest.mark.asyncio -async def test_create_tag_async( - transport: str = "grpc_asyncio", request_type=gda_tag.CreateTagRequest +async def test_list_tags_async( + transport: str = "grpc_asyncio", request_type=tag.ListTagsRequest ): client = ArtifactRegistryAsyncClient( credentials=async_anonymous_credentials(), @@ -11844,48 +11948,46 @@ async def test_create_tag_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_tag), "__call__") as call: + with mock.patch.object(type(client.transport.list_tags), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - gda_tag.Tag( - name="name_value", - version="version_value", + tag.ListTagsResponse( + next_page_token="next_page_token_value", ) ) - response = await client.create_tag(request) + response = await client.list_tags(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = gda_tag.CreateTagRequest() + request = tag.ListTagsRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, gda_tag.Tag) - assert response.name == "name_value" - assert response.version == "version_value" + assert isinstance(response, pagers.ListTagsAsyncPager) + assert response.next_page_token == "next_page_token_value" @pytest.mark.asyncio -async def test_create_tag_async_from_dict(): - await test_create_tag_async(request_type=dict) +async def test_list_tags_async_from_dict(): + await test_list_tags_async(request_type=dict) -def test_create_tag_field_headers(): +def test_list_tags_field_headers(): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = gda_tag.CreateTagRequest() + request = tag.ListTagsRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_tag), "__call__") as call: - call.return_value = gda_tag.Tag() - client.create_tag(request) + with mock.patch.object(type(client.transport.list_tags), "__call__") as call: + call.return_value = tag.ListTagsResponse() + client.list_tags(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -11901,21 +12003,23 @@ def test_create_tag_field_headers(): @pytest.mark.asyncio -async def test_create_tag_field_headers_async(): +async def test_list_tags_field_headers_async(): client = ArtifactRegistryAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = gda_tag.CreateTagRequest() + request = tag.ListTagsRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.create_tag), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gda_tag.Tag()) - await client.create_tag(request) + with mock.patch.object(type(client.transport.list_tags), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + tag.ListTagsResponse() + ) + await client.list_tags(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -11930,21 +12034,19 @@ async def test_create_tag_field_headers_async(): ) in kw["metadata"] -def test_create_tag_flattened(): +def test_list_tags_flattened(): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_tag), "__call__") as call: + with mock.patch.object(type(client.transport.list_tags), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = gda_tag.Tag() + call.return_value = tag.ListTagsResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- client.create_tag( + client.list_tags( parent="parent_value", - tag=gda_tag.Tag(name="name_value"), - tag_id="tag_id_value", ) # Establish that the underlying call was made with the expected @@ -11954,15 +12056,9 @@ def test_create_tag_flattened(): arg = args[0].parent mock_val = "parent_value" assert arg == mock_val - arg = args[0].tag - mock_val = gda_tag.Tag(name="name_value") - assert arg == mock_val - arg = args[0].tag_id - mock_val = "tag_id_value" - assert arg == mock_val -def test_create_tag_flattened_error(): +def test_list_tags_flattened_error(): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -11970,32 +12066,30 @@ def test_create_tag_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_tag( - gda_tag.CreateTagRequest(), + client.list_tags( + tag.ListTagsRequest(), parent="parent_value", - tag=gda_tag.Tag(name="name_value"), - tag_id="tag_id_value", ) @pytest.mark.asyncio -async def test_create_tag_flattened_async(): +async def test_list_tags_flattened_async(): client = ArtifactRegistryAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_tag), "__call__") as call: + with mock.patch.object(type(client.transport.list_tags), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = gda_tag.Tag() + call.return_value = tag.ListTagsResponse() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gda_tag.Tag()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + tag.ListTagsResponse() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.create_tag( + response = await client.list_tags( parent="parent_value", - tag=gda_tag.Tag(name="name_value"), - tag_id="tag_id_value", ) # Establish that the underlying call was made with the expected @@ -12005,16 +12099,10 @@ async def test_create_tag_flattened_async(): arg = args[0].parent mock_val = "parent_value" assert arg == mock_val - arg = args[0].tag - mock_val = gda_tag.Tag(name="name_value") - assert arg == mock_val - arg = args[0].tag_id - mock_val = "tag_id_value" - assert arg == mock_val @pytest.mark.asyncio -async def test_create_tag_flattened_error_async(): +async def test_list_tags_flattened_error_async(): client = ArtifactRegistryAsyncClient( credentials=async_anonymous_credentials(), ) @@ -12022,22 +12110,214 @@ async def test_create_tag_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.create_tag( - gda_tag.CreateTagRequest(), + await client.list_tags( + tag.ListTagsRequest(), parent="parent_value", - tag=gda_tag.Tag(name="name_value"), - tag_id="tag_id_value", ) +def test_list_tags_pager(transport_name: str = "grpc"): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_tags), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + tag.ListTagsResponse( + tags=[ + tag.Tag(), + tag.Tag(), + tag.Tag(), + ], + next_page_token="abc", + ), + tag.ListTagsResponse( + tags=[], + next_page_token="def", + ), + tag.ListTagsResponse( + tags=[ + tag.Tag(), + ], + next_page_token="ghi", + ), + tag.ListTagsResponse( + tags=[ + tag.Tag(), + tag.Tag(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_tags(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, tag.Tag) for i in results) + + +def test_list_tags_pages(transport_name: str = "grpc"): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_tags), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + tag.ListTagsResponse( + tags=[ + tag.Tag(), + tag.Tag(), + tag.Tag(), + ], + next_page_token="abc", + ), + tag.ListTagsResponse( + tags=[], + next_page_token="def", + ), + tag.ListTagsResponse( + tags=[ + tag.Tag(), + ], + next_page_token="ghi", + ), + tag.ListTagsResponse( + tags=[ + tag.Tag(), + tag.Tag(), + ], + ), + RuntimeError, + ) + pages = list(client.list_tags(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_tags_async_pager(): + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_tags), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + tag.ListTagsResponse( + tags=[ + tag.Tag(), + tag.Tag(), + tag.Tag(), + ], + next_page_token="abc", + ), + tag.ListTagsResponse( + tags=[], + next_page_token="def", + ), + tag.ListTagsResponse( + tags=[ + tag.Tag(), + ], + next_page_token="ghi", + ), + tag.ListTagsResponse( + tags=[ + tag.Tag(), + tag.Tag(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_tags( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, tag.Tag) for i in responses) + + +@pytest.mark.asyncio +async def test_list_tags_async_pages(): + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_tags), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + tag.ListTagsResponse( + tags=[ + tag.Tag(), + tag.Tag(), + tag.Tag(), + ], + next_page_token="abc", + ), + tag.ListTagsResponse( + tags=[], + next_page_token="def", + ), + tag.ListTagsResponse( + tags=[ + tag.Tag(), + ], + next_page_token="ghi", + ), + tag.ListTagsResponse( + tags=[ + tag.Tag(), + tag.Tag(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_tags(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + @pytest.mark.parametrize( "request_type", [ - gda_tag.UpdateTagRequest, + tag.GetTagRequest, dict, ], ) -def test_update_tag(request_type, transport: str = "grpc"): +def test_get_tag(request_type, transport: str = "grpc"): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -12048,27 +12328,27 @@ def test_update_tag(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_tag), "__call__") as call: + with mock.patch.object(type(client.transport.get_tag), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = gda_tag.Tag( + call.return_value = tag.Tag( name="name_value", version="version_value", ) - response = client.update_tag(request) + response = client.get_tag(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = gda_tag.UpdateTagRequest() + request = tag.GetTagRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, gda_tag.Tag) + assert isinstance(response, tag.Tag) assert response.name == "name_value" assert response.version == "version_value" -def test_update_tag_non_empty_request_with_auto_populated_field(): +def test_get_tag_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = ArtifactRegistryClient( @@ -12079,20 +12359,24 @@ def test_update_tag_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = gda_tag.UpdateTagRequest() + request = tag.GetTagRequest( + name="name_value", + ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_tag), "__call__") as call: + with mock.patch.object(type(client.transport.get_tag), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.update_tag(request=request) + client.get_tag(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == gda_tag.UpdateTagRequest() + assert args[0] == tag.GetTagRequest( + name="name_value", + ) -def test_update_tag_use_cached_wrapped_rpc(): +def test_get_tag_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -12106,21 +12390,21 @@ def test_update_tag_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.update_tag in client._transport._wrapped_methods + assert client._transport.get_tag in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.update_tag] = mock_rpc + client._transport._wrapped_methods[client._transport.get_tag] = mock_rpc request = {} - client.update_tag(request) + client.get_tag(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.update_tag(request) + client.get_tag(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -12128,7 +12412,7 @@ def test_update_tag_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_update_tag_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_get_tag_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -12143,7 +12427,7 @@ async def test_update_tag_async_use_cached_wrapped_rpc(transport: str = "grpc_as # Ensure method has been cached assert ( - client._client._transport.update_tag + client._client._transport.get_tag in client._client._transport._wrapped_methods ) @@ -12151,16 +12435,16 @@ async def test_update_tag_async_use_cached_wrapped_rpc(transport: str = "grpc_as mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.update_tag + client._client._transport.get_tag ] = mock_rpc request = {} - await client.update_tag(request) + await client.get_tag(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - await client.update_tag(request) + await client.get_tag(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -12168,8 +12452,8 @@ async def test_update_tag_async_use_cached_wrapped_rpc(transport: str = "grpc_as @pytest.mark.asyncio -async def test_update_tag_async( - transport: str = "grpc_asyncio", request_type=gda_tag.UpdateTagRequest +async def test_get_tag_async( + transport: str = "grpc_asyncio", request_type=tag.GetTagRequest ): client = ArtifactRegistryAsyncClient( credentials=async_anonymous_credentials(), @@ -12181,48 +12465,48 @@ async def test_update_tag_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_tag), "__call__") as call: + with mock.patch.object(type(client.transport.get_tag), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - gda_tag.Tag( + tag.Tag( name="name_value", version="version_value", ) ) - response = await client.update_tag(request) + response = await client.get_tag(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = gda_tag.UpdateTagRequest() + request = tag.GetTagRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, gda_tag.Tag) + assert isinstance(response, tag.Tag) assert response.name == "name_value" assert response.version == "version_value" @pytest.mark.asyncio -async def test_update_tag_async_from_dict(): - await test_update_tag_async(request_type=dict) +async def test_get_tag_async_from_dict(): + await test_get_tag_async(request_type=dict) -def test_update_tag_field_headers(): +def test_get_tag_field_headers(): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = gda_tag.UpdateTagRequest() + request = tag.GetTagRequest() - request.tag.name = "name_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_tag), "__call__") as call: - call.return_value = gda_tag.Tag() - client.update_tag(request) + with mock.patch.object(type(client.transport.get_tag), "__call__") as call: + call.return_value = tag.Tag() + client.get_tag(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -12233,26 +12517,26 @@ def test_update_tag_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "tag.name=name_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_update_tag_field_headers_async(): +async def test_get_tag_field_headers_async(): client = ArtifactRegistryAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = gda_tag.UpdateTagRequest() + request = tag.GetTagRequest() - request.tag.name = "name_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.update_tag), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gda_tag.Tag()) - await client.update_tag(request) + with mock.patch.object(type(client.transport.get_tag), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tag.Tag()) + await client.get_tag(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -12263,39 +12547,35 @@ async def test_update_tag_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "tag.name=name_value", + "name=name_value", ) in kw["metadata"] -def test_update_tag_flattened(): +def test_get_tag_flattened(): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_tag), "__call__") as call: + with mock.patch.object(type(client.transport.get_tag), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = gda_tag.Tag() + call.return_value = tag.Tag() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.update_tag( - tag=gda_tag.Tag(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.get_tag( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].tag - mock_val = gda_tag.Tag(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + arg = args[0].name + mock_val = "name_value" assert arg == mock_val -def test_update_tag_flattened_error(): +def test_get_tag_flattened_error(): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -12303,46 +12583,41 @@ def test_update_tag_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_tag( - gda_tag.UpdateTagRequest(), - tag=gda_tag.Tag(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.get_tag( + tag.GetTagRequest(), + name="name_value", ) @pytest.mark.asyncio -async def test_update_tag_flattened_async(): +async def test_get_tag_flattened_async(): client = ArtifactRegistryAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_tag), "__call__") as call: + with mock.patch.object(type(client.transport.get_tag), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = gda_tag.Tag() + call.return_value = tag.Tag() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gda_tag.Tag()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tag.Tag()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.update_tag( - tag=gda_tag.Tag(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + response = await client.get_tag( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].tag - mock_val = gda_tag.Tag(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + arg = args[0].name + mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio -async def test_update_tag_flattened_error_async(): +async def test_get_tag_flattened_error_async(): client = ArtifactRegistryAsyncClient( credentials=async_anonymous_credentials(), ) @@ -12350,21 +12625,20 @@ async def test_update_tag_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.update_tag( - gda_tag.UpdateTagRequest(), - tag=gda_tag.Tag(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + await client.get_tag( + tag.GetTagRequest(), + name="name_value", ) @pytest.mark.parametrize( "request_type", [ - tag.DeleteTagRequest, + gda_tag.CreateTagRequest, dict, ], ) -def test_delete_tag(request_type, transport: str = "grpc"): +def test_create_tag(request_type, transport: str = "grpc"): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -12375,22 +12649,27 @@ def test_delete_tag(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_tag), "__call__") as call: + with mock.patch.object(type(client.transport.create_tag), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_tag(request) + call.return_value = gda_tag.Tag( + name="name_value", + version="version_value", + ) + response = client.create_tag(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = tag.DeleteTagRequest() + request = gda_tag.CreateTagRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert response is None + assert isinstance(response, gda_tag.Tag) + assert response.name == "name_value" + assert response.version == "version_value" -def test_delete_tag_non_empty_request_with_auto_populated_field(): +def test_create_tag_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = ArtifactRegistryClient( @@ -12401,24 +12680,26 @@ def test_delete_tag_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = tag.DeleteTagRequest( - name="name_value", + request = gda_tag.CreateTagRequest( + parent="parent_value", + tag_id="tag_id_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_tag), "__call__") as call: + with mock.patch.object(type(client.transport.create_tag), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.delete_tag(request=request) + client.create_tag(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == tag.DeleteTagRequest( - name="name_value", + assert args[0] == gda_tag.CreateTagRequest( + parent="parent_value", + tag_id="tag_id_value", ) -def test_delete_tag_use_cached_wrapped_rpc(): +def test_create_tag_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -12432,21 +12713,21 @@ def test_delete_tag_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.delete_tag in client._transport._wrapped_methods + assert client._transport.create_tag in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.delete_tag] = mock_rpc + client._transport._wrapped_methods[client._transport.create_tag] = mock_rpc request = {} - client.delete_tag(request) + client.create_tag(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.delete_tag(request) + client.create_tag(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -12454,7 +12735,7 @@ def test_delete_tag_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_delete_tag_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_create_tag_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -12469,7 +12750,7 @@ async def test_delete_tag_async_use_cached_wrapped_rpc(transport: str = "grpc_as # Ensure method has been cached assert ( - client._client._transport.delete_tag + client._client._transport.create_tag in client._client._transport._wrapped_methods ) @@ -12477,16 +12758,16 @@ async def test_delete_tag_async_use_cached_wrapped_rpc(transport: str = "grpc_as mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.delete_tag + client._client._transport.create_tag ] = mock_rpc request = {} - await client.delete_tag(request) + await client.create_tag(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - await client.delete_tag(request) + await client.create_tag(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -12494,8 +12775,8 @@ async def test_delete_tag_async_use_cached_wrapped_rpc(transport: str = "grpc_as @pytest.mark.asyncio -async def test_delete_tag_async( - transport: str = "grpc_asyncio", request_type=tag.DeleteTagRequest +async def test_create_tag_async( + transport: str = "grpc_asyncio", request_type=gda_tag.CreateTagRequest ): client = ArtifactRegistryAsyncClient( credentials=async_anonymous_credentials(), @@ -12507,41 +12788,48 @@ async def test_delete_tag_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_tag), "__call__") as call: + with mock.patch.object(type(client.transport.create_tag), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_tag(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gda_tag.Tag( + name="name_value", + version="version_value", + ) + ) + response = await client.create_tag(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = tag.DeleteTagRequest() + request = gda_tag.CreateTagRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert response is None + assert isinstance(response, gda_tag.Tag) + assert response.name == "name_value" + assert response.version == "version_value" @pytest.mark.asyncio -async def test_delete_tag_async_from_dict(): - await test_delete_tag_async(request_type=dict) +async def test_create_tag_async_from_dict(): + await test_create_tag_async(request_type=dict) -def test_delete_tag_field_headers(): +def test_create_tag_field_headers(): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = tag.DeleteTagRequest() + request = gda_tag.CreateTagRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_tag), "__call__") as call: - call.return_value = None - client.delete_tag(request) + with mock.patch.object(type(client.transport.create_tag), "__call__") as call: + call.return_value = gda_tag.Tag() + client.create_tag(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -12552,26 +12840,26 @@ def test_delete_tag_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_delete_tag_field_headers_async(): +async def test_create_tag_field_headers_async(): client = ArtifactRegistryAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = tag.DeleteTagRequest() + request = gda_tag.CreateTagRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.delete_tag), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_tag(request) + with mock.patch.object(type(client.transport.create_tag), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gda_tag.Tag()) + await client.create_tag(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -12582,35 +12870,43 @@ async def test_delete_tag_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] -def test_delete_tag_flattened(): +def test_create_tag_flattened(): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_tag), "__call__") as call: + with mock.patch.object(type(client.transport.create_tag), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = None + call.return_value = gda_tag.Tag() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.delete_tag( - name="name_value", + client.create_tag( + parent="parent_value", + tag=gda_tag.Tag(name="name_value"), + tag_id="tag_id_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].tag + mock_val = gda_tag.Tag(name="name_value") + assert arg == mock_val + arg = args[0].tag_id + mock_val = "tag_id_value" assert arg == mock_val -def test_delete_tag_flattened_error(): +def test_create_tag_flattened_error(): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -12618,41 +12914,51 @@ def test_delete_tag_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete_tag( - tag.DeleteTagRequest(), - name="name_value", + client.create_tag( + gda_tag.CreateTagRequest(), + parent="parent_value", + tag=gda_tag.Tag(name="name_value"), + tag_id="tag_id_value", ) @pytest.mark.asyncio -async def test_delete_tag_flattened_async(): +async def test_create_tag_flattened_async(): client = ArtifactRegistryAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_tag), "__call__") as call: + with mock.patch.object(type(client.transport.create_tag), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = None + call.return_value = gda_tag.Tag() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gda_tag.Tag()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.delete_tag( - name="name_value", + response = await client.create_tag( + parent="parent_value", + tag=gda_tag.Tag(name="name_value"), + tag_id="tag_id_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].tag + mock_val = gda_tag.Tag(name="name_value") + assert arg == mock_val + arg = args[0].tag_id + mock_val = "tag_id_value" assert arg == mock_val @pytest.mark.asyncio -async def test_delete_tag_flattened_error_async(): +async def test_create_tag_flattened_error_async(): client = ArtifactRegistryAsyncClient( credentials=async_anonymous_credentials(), ) @@ -12660,20 +12966,22 @@ async def test_delete_tag_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.delete_tag( - tag.DeleteTagRequest(), - name="name_value", + await client.create_tag( + gda_tag.CreateTagRequest(), + parent="parent_value", + tag=gda_tag.Tag(name="name_value"), + tag_id="tag_id_value", ) @pytest.mark.parametrize( "request_type", [ - iam_policy_pb2.SetIamPolicyRequest, + gda_tag.UpdateTagRequest, dict, ], ) -def test_set_iam_policy(request_type, transport: str = "grpc"): +def test_update_tag(request_type, transport: str = "grpc"): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -12684,27 +12992,27 @@ def test_set_iam_policy(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + with mock.patch.object(type(client.transport.update_tag), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = policy_pb2.Policy( - version=774, - etag=b"etag_blob", + call.return_value = gda_tag.Tag( + name="name_value", + version="version_value", ) - response = client.set_iam_policy(request) + response = client.update_tag(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = iam_policy_pb2.SetIamPolicyRequest() + request = gda_tag.UpdateTagRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - assert response.version == 774 - assert response.etag == b"etag_blob" + assert isinstance(response, gda_tag.Tag) + assert response.name == "name_value" + assert response.version == "version_value" -def test_set_iam_policy_non_empty_request_with_auto_populated_field(): +def test_update_tag_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = ArtifactRegistryClient( @@ -12715,24 +13023,20 @@ def test_set_iam_policy_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = iam_policy_pb2.SetIamPolicyRequest( - resource="resource_value", - ) + request = gda_tag.UpdateTagRequest() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + with mock.patch.object(type(client.transport.update_tag), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.set_iam_policy(request=request) + client.update_tag(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.SetIamPolicyRequest( - resource="resource_value", - ) + assert args[0] == gda_tag.UpdateTagRequest() -def test_set_iam_policy_use_cached_wrapped_rpc(): +def test_update_tag_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -12746,21 +13050,21 @@ def test_set_iam_policy_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.set_iam_policy in client._transport._wrapped_methods + assert client._transport.update_tag in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.set_iam_policy] = mock_rpc + client._transport._wrapped_methods[client._transport.update_tag] = mock_rpc request = {} - client.set_iam_policy(request) + client.update_tag(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.set_iam_policy(request) + client.update_tag(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -12768,9 +13072,7 @@ def test_set_iam_policy_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_set_iam_policy_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): +async def test_update_tag_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -12785,7 +13087,7 @@ async def test_set_iam_policy_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.set_iam_policy + client._client._transport.update_tag in client._client._transport._wrapped_methods ) @@ -12793,16 +13095,16 @@ async def test_set_iam_policy_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.set_iam_policy + client._client._transport.update_tag ] = mock_rpc request = {} - await client.set_iam_policy(request) + await client.update_tag(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - await client.set_iam_policy(request) + await client.update_tag(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -12810,8 +13112,8 @@ async def test_set_iam_policy_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_set_iam_policy_async( - transport: str = "grpc_asyncio", request_type=iam_policy_pb2.SetIamPolicyRequest +async def test_update_tag_async( + transport: str = "grpc_asyncio", request_type=gda_tag.UpdateTagRequest ): client = ArtifactRegistryAsyncClient( credentials=async_anonymous_credentials(), @@ -12823,48 +13125,48 @@ async def test_set_iam_policy_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + with mock.patch.object(type(client.transport.update_tag), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - policy_pb2.Policy( - version=774, - etag=b"etag_blob", + gda_tag.Tag( + name="name_value", + version="version_value", ) ) - response = await client.set_iam_policy(request) + response = await client.update_tag(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = iam_policy_pb2.SetIamPolicyRequest() + request = gda_tag.UpdateTagRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, policy_pb2.Policy) - assert response.version == 774 - assert response.etag == b"etag_blob" + assert isinstance(response, gda_tag.Tag) + assert response.name == "name_value" + assert response.version == "version_value" @pytest.mark.asyncio -async def test_set_iam_policy_async_from_dict(): - await test_set_iam_policy_async(request_type=dict) +async def test_update_tag_async_from_dict(): + await test_update_tag_async(request_type=dict) -def test_set_iam_policy_field_headers(): +def test_update_tag_field_headers(): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = iam_policy_pb2.SetIamPolicyRequest() + request = gda_tag.UpdateTagRequest() - request.resource = "resource_value" + request.tag.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - call.return_value = policy_pb2.Policy() - client.set_iam_policy(request) + with mock.patch.object(type(client.transport.update_tag), "__call__") as call: + call.return_value = gda_tag.Tag() + client.update_tag(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -12875,26 +13177,26 @@ def test_set_iam_policy_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "resource=resource_value", + "tag.name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_set_iam_policy_field_headers_async(): +async def test_update_tag_field_headers_async(): client = ArtifactRegistryAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = iam_policy_pb2.SetIamPolicyRequest() + request = gda_tag.UpdateTagRequest() - request.resource = "resource_value" + request.tag.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) - await client.set_iam_policy(request) + with mock.patch.object(type(client.transport.update_tag), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gda_tag.Tag()) + await client.update_tag(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -12905,36 +13207,108 @@ async def test_set_iam_policy_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "resource=resource_value", + "tag.name=name_value", ) in kw["metadata"] -def test_set_iam_policy_from_dict_foreign(): +def test_update_tag_flattened(): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), ) + # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + with mock.patch.object(type(client.transport.update_tag), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy() - response = client.set_iam_policy( - request={ - "resource": "resource_value", - "policy": policy_pb2.Policy(version=774), - "update_mask": field_mask_pb2.FieldMask(paths=["paths_value"]), - } + call.return_value = gda_tag.Tag() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_tag( + tag=gda_tag.Tag(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].tag + mock_val = gda_tag.Tag(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_tag_flattened_error(): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_tag( + gda_tag.UpdateTagRequest(), + tag=gda_tag.Tag(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_tag_flattened_async(): + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_tag), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gda_tag.Tag() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gda_tag.Tag()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_tag( + tag=gda_tag.Tag(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].tag + mock_val = gda_tag.Tag(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_tag_flattened_error_async(): + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_tag( + gda_tag.UpdateTagRequest(), + tag=gda_tag.Tag(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) - call.assert_called() @pytest.mark.parametrize( "request_type", [ - iam_policy_pb2.GetIamPolicyRequest, + tag.DeleteTagRequest, dict, ], ) -def test_get_iam_policy(request_type, transport: str = "grpc"): +def test_delete_tag(request_type, transport: str = "grpc"): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -12945,27 +13319,22 @@ def test_get_iam_policy(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + with mock.patch.object(type(client.transport.delete_tag), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy( - version=774, - etag=b"etag_blob", - ) - response = client.get_iam_policy(request) + call.return_value = None + response = client.delete_tag(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = iam_policy_pb2.GetIamPolicyRequest() + request = tag.DeleteTagRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, policy_pb2.Policy) - assert response.version == 774 - assert response.etag == b"etag_blob" + assert response is None -def test_get_iam_policy_non_empty_request_with_auto_populated_field(): +def test_delete_tag_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = ArtifactRegistryClient( @@ -12976,24 +13345,24 @@ def test_get_iam_policy_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = iam_policy_pb2.GetIamPolicyRequest( - resource="resource_value", + request = tag.DeleteTagRequest( + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + with mock.patch.object(type(client.transport.delete_tag), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.get_iam_policy(request=request) + client.delete_tag(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.GetIamPolicyRequest( - resource="resource_value", + assert args[0] == tag.DeleteTagRequest( + name="name_value", ) -def test_get_iam_policy_use_cached_wrapped_rpc(): +def test_delete_tag_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -13007,21 +13376,21 @@ def test_get_iam_policy_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_iam_policy in client._transport._wrapped_methods + assert client._transport.delete_tag in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_iam_policy] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_tag] = mock_rpc request = {} - client.get_iam_policy(request) + client.delete_tag(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_iam_policy(request) + client.delete_tag(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -13029,9 +13398,7 @@ def test_get_iam_policy_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_get_iam_policy_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): +async def test_delete_tag_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -13046,7 +13413,7 @@ async def test_get_iam_policy_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.get_iam_policy + client._client._transport.delete_tag in client._client._transport._wrapped_methods ) @@ -13054,16 +13421,16 @@ async def test_get_iam_policy_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.get_iam_policy + client._client._transport.delete_tag ] = mock_rpc request = {} - await client.get_iam_policy(request) + await client.delete_tag(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - await client.get_iam_policy(request) + await client.delete_tag(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -13071,8 +13438,8 @@ async def test_get_iam_policy_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_get_iam_policy_async( - transport: str = "grpc_asyncio", request_type=iam_policy_pb2.GetIamPolicyRequest +async def test_delete_tag_async( + transport: str = "grpc_asyncio", request_type=tag.DeleteTagRequest ): client = ArtifactRegistryAsyncClient( credentials=async_anonymous_credentials(), @@ -13084,48 +13451,41 @@ async def test_get_iam_policy_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + with mock.patch.object(type(client.transport.delete_tag), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - policy_pb2.Policy( - version=774, - etag=b"etag_blob", - ) - ) - response = await client.get_iam_policy(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_tag(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = iam_policy_pb2.GetIamPolicyRequest() + request = tag.DeleteTagRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, policy_pb2.Policy) - assert response.version == 774 - assert response.etag == b"etag_blob" + assert response is None @pytest.mark.asyncio -async def test_get_iam_policy_async_from_dict(): - await test_get_iam_policy_async(request_type=dict) +async def test_delete_tag_async_from_dict(): + await test_delete_tag_async(request_type=dict) -def test_get_iam_policy_field_headers(): +def test_delete_tag_field_headers(): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = iam_policy_pb2.GetIamPolicyRequest() + request = tag.DeleteTagRequest() - request.resource = "resource_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: - call.return_value = policy_pb2.Policy() - client.get_iam_policy(request) + with mock.patch.object(type(client.transport.delete_tag), "__call__") as call: + call.return_value = None + client.delete_tag(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -13136,26 +13496,26 @@ def test_get_iam_policy_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "resource=resource_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_get_iam_policy_field_headers_async(): +async def test_delete_tag_field_headers_async(): client = ArtifactRegistryAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = iam_policy_pb2.GetIamPolicyRequest() + request = tag.DeleteTagRequest() - request.resource = "resource_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) - await client.get_iam_policy(request) + with mock.patch.object(type(client.transport.delete_tag), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_tag(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -13166,35 +13526,98 @@ async def test_get_iam_policy_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "resource=resource_value", + "name=name_value", ) in kw["metadata"] -def test_get_iam_policy_from_dict_foreign(): +def test_delete_tag_flattened(): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), ) + # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + with mock.patch.object(type(client.transport.delete_tag), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy() - response = client.get_iam_policy( - request={ - "resource": "resource_value", - "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), - } + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_tag( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_tag_flattened_error(): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_tag( + tag.DeleteTagRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_tag_flattened_async(): + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_tag), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_tag( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_tag_flattened_error_async(): + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.delete_tag( + tag.DeleteTagRequest(), + name="name_value", ) - call.assert_called() @pytest.mark.parametrize( "request_type", [ - iam_policy_pb2.TestIamPermissionsRequest, + gda_rule.CreateRuleRequest, dict, ], ) -def test_test_iam_permissions(request_type, transport: str = "grpc"): +def test_create_rule(request_type, transport: str = "grpc"): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -13205,27 +13628,31 @@ def test_test_iam_permissions(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_rule), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = iam_policy_pb2.TestIamPermissionsResponse( - permissions=["permissions_value"], + call.return_value = gda_rule.Rule( + name="name_value", + action=gda_rule.Rule.Action.ALLOW, + operation=gda_rule.Rule.Operation.DOWNLOAD, + package_id="package_id_value", ) - response = client.test_iam_permissions(request) + response = client.create_rule(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = iam_policy_pb2.TestIamPermissionsRequest() + request = gda_rule.CreateRuleRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) - assert response.permissions == ["permissions_value"] + assert isinstance(response, gda_rule.Rule) + assert response.name == "name_value" + assert response.action == gda_rule.Rule.Action.ALLOW + assert response.operation == gda_rule.Rule.Operation.DOWNLOAD + assert response.package_id == "package_id_value" -def test_test_iam_permissions_non_empty_request_with_auto_populated_field(): +def test_create_rule_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = ArtifactRegistryClient( @@ -13236,26 +13663,26 @@ def test_test_iam_permissions_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = iam_policy_pb2.TestIamPermissionsRequest( - resource="resource_value", + request = gda_rule.CreateRuleRequest( + parent="parent_value", + rule_id="rule_id_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_rule), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.test_iam_permissions(request=request) + client.create_rule(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.TestIamPermissionsRequest( - resource="resource_value", + assert args[0] == gda_rule.CreateRuleRequest( + parent="parent_value", + rule_id="rule_id_value", ) -def test_test_iam_permissions_use_cached_wrapped_rpc(): +def test_create_rule_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -13269,25 +13696,21 @@ def test_test_iam_permissions_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.test_iam_permissions in client._transport._wrapped_methods - ) + assert client._transport.create_rule in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.test_iam_permissions - ] = mock_rpc + client._transport._wrapped_methods[client._transport.create_rule] = mock_rpc request = {} - client.test_iam_permissions(request) + client.create_rule(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.test_iam_permissions(request) + client.create_rule(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -13295,7 +13718,7 @@ def test_test_iam_permissions_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_test_iam_permissions_async_use_cached_wrapped_rpc( +async def test_create_rule_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -13312,7 +13735,7 @@ async def test_test_iam_permissions_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.test_iam_permissions + client._client._transport.create_rule in client._client._transport._wrapped_methods ) @@ -13320,16 +13743,16 @@ async def test_test_iam_permissions_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.test_iam_permissions + client._client._transport.create_rule ] = mock_rpc request = {} - await client.test_iam_permissions(request) + await client.create_rule(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - await client.test_iam_permissions(request) + await client.create_rule(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -13337,9 +13760,8 @@ async def test_test_iam_permissions_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_test_iam_permissions_async( - transport: str = "grpc_asyncio", - request_type=iam_policy_pb2.TestIamPermissionsRequest, +async def test_create_rule_async( + transport: str = "grpc_asyncio", request_type=gda_rule.CreateRuleRequest ): client = ArtifactRegistryAsyncClient( credentials=async_anonymous_credentials(), @@ -13351,50 +13773,52 @@ async def test_test_iam_permissions_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_rule), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - iam_policy_pb2.TestIamPermissionsResponse( - permissions=["permissions_value"], + gda_rule.Rule( + name="name_value", + action=gda_rule.Rule.Action.ALLOW, + operation=gda_rule.Rule.Operation.DOWNLOAD, + package_id="package_id_value", ) ) - response = await client.test_iam_permissions(request) + response = await client.create_rule(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = iam_policy_pb2.TestIamPermissionsRequest() + request = gda_rule.CreateRuleRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) - assert response.permissions == ["permissions_value"] + assert isinstance(response, gda_rule.Rule) + assert response.name == "name_value" + assert response.action == gda_rule.Rule.Action.ALLOW + assert response.operation == gda_rule.Rule.Operation.DOWNLOAD + assert response.package_id == "package_id_value" @pytest.mark.asyncio -async def test_test_iam_permissions_async_from_dict(): - await test_test_iam_permissions_async(request_type=dict) +async def test_create_rule_async_from_dict(): + await test_create_rule_async(request_type=dict) -def test_test_iam_permissions_field_headers(): +def test_create_rule_field_headers(): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = iam_policy_pb2.TestIamPermissionsRequest() + request = gda_rule.CreateRuleRequest() - request.resource = "resource_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - call.return_value = iam_policy_pb2.TestIamPermissionsResponse() - client.test_iam_permissions(request) + with mock.patch.object(type(client.transport.create_rule), "__call__") as call: + call.return_value = gda_rule.Rule() + client.create_rule(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -13405,30 +13829,26 @@ def test_test_iam_permissions_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "resource=resource_value", + "parent=parent_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_test_iam_permissions_field_headers_async(): +async def test_create_rule_field_headers_async(): client = ArtifactRegistryAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = iam_policy_pb2.TestIamPermissionsRequest() + request = gda_rule.CreateRuleRequest() - request.resource = "resource_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - iam_policy_pb2.TestIamPermissionsResponse() - ) - await client.test_iam_permissions(request) + with mock.patch.object(type(client.transport.create_rule), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gda_rule.Rule()) + await client.create_rule(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -13439,37 +13859,118 @@ async def test_test_iam_permissions_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "resource=resource_value", + "parent=parent_value", ) in kw["metadata"] -def test_test_iam_permissions_from_dict_foreign(): +def test_create_rule_flattened(): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), ) + # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_rule), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = iam_policy_pb2.TestIamPermissionsResponse() - response = client.test_iam_permissions( - request={ - "resource": "resource_value", - "permissions": ["permissions_value"], - } + call.return_value = gda_rule.Rule() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_rule( + parent="parent_value", + rule=gda_rule.Rule(name="name_value"), + rule_id="rule_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].rule + mock_val = gda_rule.Rule(name="name_value") + assert arg == mock_val + arg = args[0].rule_id + mock_val = "rule_id_value" + assert arg == mock_val + + +def test_create_rule_flattened_error(): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_rule( + gda_rule.CreateRuleRequest(), + parent="parent_value", + rule=gda_rule.Rule(name="name_value"), + rule_id="rule_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_rule_flattened_async(): + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_rule), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = gda_rule.Rule() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gda_rule.Rule()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_rule( + parent="parent_value", + rule=gda_rule.Rule(name="name_value"), + rule_id="rule_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].rule + mock_val = gda_rule.Rule(name="name_value") + assert arg == mock_val + arg = args[0].rule_id + mock_val = "rule_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_rule_flattened_error_async(): + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_rule( + gda_rule.CreateRuleRequest(), + parent="parent_value", + rule=gda_rule.Rule(name="name_value"), + rule_id="rule_id_value", ) - call.assert_called() @pytest.mark.parametrize( "request_type", [ - settings.GetProjectSettingsRequest, + rule.ListRulesRequest, dict, ], ) -def test_get_project_settings(request_type, transport: str = "grpc"): +def test_list_rules(request_type, transport: str = "grpc"): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -13480,32 +13981,25 @@ def test_get_project_settings(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_project_settings), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_rules), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = settings.ProjectSettings( - name="name_value", - legacy_redirection_state=settings.ProjectSettings.RedirectionState.REDIRECTION_FROM_GCR_IO_DISABLED, + call.return_value = rule.ListRulesResponse( + next_page_token="next_page_token_value", ) - response = client.get_project_settings(request) + response = client.list_rules(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = settings.GetProjectSettingsRequest() + request = rule.ListRulesRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, settings.ProjectSettings) - assert response.name == "name_value" - assert ( - response.legacy_redirection_state - == settings.ProjectSettings.RedirectionState.REDIRECTION_FROM_GCR_IO_DISABLED - ) + assert isinstance(response, pagers.ListRulesPager) + assert response.next_page_token == "next_page_token_value" -def test_get_project_settings_non_empty_request_with_auto_populated_field(): +def test_list_rules_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = ArtifactRegistryClient( @@ -13516,26 +14010,26 @@ def test_get_project_settings_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. 
- request = settings.GetProjectSettingsRequest( - name="name_value", + request = rule.ListRulesRequest( + parent="parent_value", + page_token="page_token_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_project_settings), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_rules), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.get_project_settings(request=request) + client.list_rules(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == settings.GetProjectSettingsRequest( - name="name_value", + assert args[0] == rule.ListRulesRequest( + parent="parent_value", + page_token="page_token_value", ) -def test_get_project_settings_use_cached_wrapped_rpc(): +def test_list_rules_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -13549,25 +14043,21 @@ def test_get_project_settings_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.get_project_settings in client._transport._wrapped_methods - ) + assert client._transport.list_rules in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.get_project_settings - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_rules] = mock_rpc request = {} - client.get_project_settings(request) + client.list_rules(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_project_settings(request) + client.list_rules(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -13575,9 +14065,7 @@ def test_get_project_settings_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_get_project_settings_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): +async def test_list_rules_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -13592,7 +14080,7 @@ async def test_get_project_settings_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.get_project_settings + client._client._transport.list_rules in client._client._transport._wrapped_methods ) @@ -13600,16 +14088,16 @@ async def test_get_project_settings_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.get_project_settings + client._client._transport.list_rules ] = mock_rpc request = {} - await client.get_project_settings(request) + await client.list_rules(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - await client.get_project_settings(request) + await client.list_rules(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -13617,8 +14105,8 @@ async def test_get_project_settings_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_get_project_settings_async( - transport: str = "grpc_asyncio", request_type=settings.GetProjectSettingsRequest +async def test_list_rules_async( + transport: str = "grpc_asyncio", request_type=rule.ListRulesRequest ): client = ArtifactRegistryAsyncClient( credentials=async_anonymous_credentials(), @@ -13630,55 +14118,46 @@ async def test_get_project_settings_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_project_settings), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_rules), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - settings.ProjectSettings( - name="name_value", - legacy_redirection_state=settings.ProjectSettings.RedirectionState.REDIRECTION_FROM_GCR_IO_DISABLED, + rule.ListRulesResponse( + next_page_token="next_page_token_value", ) ) - response = await client.get_project_settings(request) + response = await client.list_rules(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = settings.GetProjectSettingsRequest() + request = rule.ListRulesRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, settings.ProjectSettings) - assert response.name == "name_value" - assert ( - response.legacy_redirection_state - == settings.ProjectSettings.RedirectionState.REDIRECTION_FROM_GCR_IO_DISABLED - ) + assert isinstance(response, pagers.ListRulesAsyncPager) + assert response.next_page_token == "next_page_token_value" @pytest.mark.asyncio -async def test_get_project_settings_async_from_dict(): - await test_get_project_settings_async(request_type=dict) +async def test_list_rules_async_from_dict(): + await test_list_rules_async(request_type=dict) -def test_get_project_settings_field_headers(): +def test_list_rules_field_headers(): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = settings.GetProjectSettingsRequest() + request = rule.ListRulesRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_project_settings), "__call__" - ) as call: - call.return_value = settings.ProjectSettings() - client.get_project_settings(request) + with mock.patch.object(type(client.transport.list_rules), "__call__") as call: + call.return_value = rule.ListRulesResponse() + client.list_rules(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -13689,30 +14168,28 @@ def test_get_project_settings_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_get_project_settings_field_headers_async(): +async def test_list_rules_field_headers_async(): client = ArtifactRegistryAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = settings.GetProjectSettingsRequest() + request = rule.ListRulesRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_project_settings), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_rules), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - settings.ProjectSettings() + rule.ListRulesResponse() ) - await client.get_project_settings(request) + await client.list_rules(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -13723,37 +14200,35 @@ async def test_get_project_settings_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] -def test_get_project_settings_flattened(): +def test_list_rules_flattened(): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_project_settings), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_rules), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = settings.ProjectSettings() + call.return_value = rule.ListRulesResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_project_settings( - name="name_value", + client.list_rules( + parent="parent_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" assert arg == mock_val -def test_get_project_settings_flattened_error(): +def test_list_rules_flattened_error(): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -13761,45 +14236,43 @@ def test_get_project_settings_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_project_settings( - settings.GetProjectSettingsRequest(), - name="name_value", + client.list_rules( + rule.ListRulesRequest(), + parent="parent_value", ) @pytest.mark.asyncio -async def test_get_project_settings_flattened_async(): +async def test_list_rules_flattened_async(): client = ArtifactRegistryAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_project_settings), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_rules), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = settings.ProjectSettings() + call.return_value = rule.ListRulesResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - settings.ProjectSettings() + rule.ListRulesResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.get_project_settings( - name="name_value", + response = await client.list_rules( + parent="parent_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" assert arg == mock_val @pytest.mark.asyncio -async def test_get_project_settings_flattened_error_async(): +async def test_list_rules_flattened_error_async(): client = ArtifactRegistryAsyncClient( credentials=async_anonymous_credentials(), ) @@ -13807,20 +14280,214 @@ async def test_get_project_settings_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.get_project_settings( - settings.GetProjectSettingsRequest(), - name="name_value", + await client.list_rules( + rule.ListRulesRequest(), + parent="parent_value", + ) + + +def test_list_rules_pager(transport_name: str = "grpc"): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_rules), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + rule.ListRulesResponse( + rules=[ + rule.Rule(), + rule.Rule(), + rule.Rule(), + ], + next_page_token="abc", + ), + rule.ListRulesResponse( + rules=[], + next_page_token="def", + ), + rule.ListRulesResponse( + rules=[ + rule.Rule(), + ], + next_page_token="ghi", + ), + rule.ListRulesResponse( + rules=[ + rule.Rule(), + rule.Rule(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_rules(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, rule.Rule) for i in results) + + +def test_list_rules_pages(transport_name: str = "grpc"): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_rules), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + rule.ListRulesResponse( + rules=[ + rule.Rule(), + rule.Rule(), + rule.Rule(), + ], + next_page_token="abc", + ), + rule.ListRulesResponse( + rules=[], + next_page_token="def", + ), + rule.ListRulesResponse( + rules=[ + rule.Rule(), + ], + next_page_token="ghi", + ), + rule.ListRulesResponse( + rules=[ + rule.Rule(), + rule.Rule(), + ], + ), + RuntimeError, + ) + pages = list(client.list_rules(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_rules_async_pager(): + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_rules), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + rule.ListRulesResponse( + rules=[ + rule.Rule(), + rule.Rule(), + rule.Rule(), + ], + next_page_token="abc", + ), + rule.ListRulesResponse( + rules=[], + next_page_token="def", + ), + rule.ListRulesResponse( + rules=[ + rule.Rule(), + ], + next_page_token="ghi", + ), + rule.ListRulesResponse( + rules=[ + rule.Rule(), + rule.Rule(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_rules( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, rule.Rule) for i in responses) + + +@pytest.mark.asyncio +async def test_list_rules_async_pages(): + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_rules), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + rule.ListRulesResponse( + rules=[ + rule.Rule(), + rule.Rule(), + rule.Rule(), + ], + next_page_token="abc", + ), + rule.ListRulesResponse( + rules=[], + next_page_token="def", + ), + rule.ListRulesResponse( + rules=[ + rule.Rule(), + ], + next_page_token="ghi", + ), + rule.ListRulesResponse( + rules=[ + rule.Rule(), + rule.Rule(), + ], + ), + RuntimeError, ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_rules(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token @pytest.mark.parametrize( "request_type", [ - settings.UpdateProjectSettingsRequest, + rule.GetRuleRequest, dict, ], ) -def test_update_project_settings(request_type, transport: str = "grpc"): +def test_get_rule(request_type, transport: str = "grpc"): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -13831,32 +14498,31 @@ def test_update_project_settings(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_project_settings), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_rule), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = settings.ProjectSettings( + call.return_value = rule.Rule( name="name_value", - legacy_redirection_state=settings.ProjectSettings.RedirectionState.REDIRECTION_FROM_GCR_IO_DISABLED, + action=rule.Rule.Action.ALLOW, + operation=rule.Rule.Operation.DOWNLOAD, + package_id="package_id_value", ) - response = client.update_project_settings(request) + response = client.get_rule(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = settings.UpdateProjectSettingsRequest() + request = rule.GetRuleRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, settings.ProjectSettings) + assert isinstance(response, rule.Rule) assert response.name == "name_value" - assert ( - response.legacy_redirection_state - == settings.ProjectSettings.RedirectionState.REDIRECTION_FROM_GCR_IO_DISABLED - ) + assert response.action == rule.Rule.Action.ALLOW + assert response.operation == rule.Rule.Operation.DOWNLOAD + assert response.package_id == "package_id_value" -def test_update_project_settings_non_empty_request_with_auto_populated_field(): +def test_get_rule_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = ArtifactRegistryClient( @@ -13867,22 +14533,24 @@ def test_update_project_settings_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = settings.UpdateProjectSettingsRequest() + request = rule.GetRuleRequest( + name="name_value", + ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.update_project_settings), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_rule), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.update_project_settings(request=request) + client.get_rule(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == settings.UpdateProjectSettingsRequest() + assert args[0] == rule.GetRuleRequest( + name="name_value", + ) -def test_update_project_settings_use_cached_wrapped_rpc(): +def test_get_rule_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -13896,26 +14564,21 @@ def test_update_project_settings_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.update_project_settings - in client._transport._wrapped_methods - ) + assert client._transport.get_rule in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.update_project_settings - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_rule] = mock_rpc request = {} - client.update_project_settings(request) + client.get_rule(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.update_project_settings(request) + client.get_rule(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -13923,9 +14586,7 @@ def test_update_project_settings_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_update_project_settings_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): +async def test_get_rule_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -13940,7 +14601,7 @@ async def test_update_project_settings_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.update_project_settings + client._client._transport.get_rule in client._client._transport._wrapped_methods ) @@ -13948,16 +14609,16 @@ async def test_update_project_settings_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.update_project_settings + client._client._transport.get_rule ] = mock_rpc request = {} - await client.update_project_settings(request) + await client.get_rule(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - await client.update_project_settings(request) + await client.get_rule(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -13965,8 +14626,8 @@ async def test_update_project_settings_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_update_project_settings_async( - transport: str = "grpc_asyncio", request_type=settings.UpdateProjectSettingsRequest +async def test_get_rule_async( + transport: str = "grpc_asyncio", request_type=rule.GetRuleRequest ): client = ArtifactRegistryAsyncClient( credentials=async_anonymous_credentials(), @@ -13978,55 +14639,52 @@ async def test_update_project_settings_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_project_settings), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_rule), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - settings.ProjectSettings( + rule.Rule( name="name_value", - legacy_redirection_state=settings.ProjectSettings.RedirectionState.REDIRECTION_FROM_GCR_IO_DISABLED, + action=rule.Rule.Action.ALLOW, + operation=rule.Rule.Operation.DOWNLOAD, + package_id="package_id_value", ) ) - response = await client.update_project_settings(request) + response = await client.get_rule(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = settings.UpdateProjectSettingsRequest() + request = rule.GetRuleRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, settings.ProjectSettings) + assert isinstance(response, rule.Rule) assert response.name == "name_value" - assert ( - response.legacy_redirection_state - == settings.ProjectSettings.RedirectionState.REDIRECTION_FROM_GCR_IO_DISABLED - ) + assert response.action == rule.Rule.Action.ALLOW + assert response.operation == rule.Rule.Operation.DOWNLOAD + assert response.package_id == "package_id_value" @pytest.mark.asyncio -async def test_update_project_settings_async_from_dict(): - await test_update_project_settings_async(request_type=dict) +async def test_get_rule_async_from_dict(): + await test_get_rule_async(request_type=dict) -def test_update_project_settings_field_headers(): +def test_get_rule_field_headers(): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = settings.UpdateProjectSettingsRequest() + request = rule.GetRuleRequest() - request.project_settings.name = "name_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_project_settings), "__call__" - ) as call: - call.return_value = settings.ProjectSettings() - client.update_project_settings(request) + with mock.patch.object(type(client.transport.get_rule), "__call__") as call: + call.return_value = rule.Rule() + client.get_rule(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -14037,30 +14695,26 @@ def test_update_project_settings_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "project_settings.name=name_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_update_project_settings_field_headers_async(): +async def test_get_rule_field_headers_async(): client = ArtifactRegistryAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = settings.UpdateProjectSettingsRequest() + request = rule.GetRuleRequest() - request.project_settings.name = "name_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_project_settings), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - settings.ProjectSettings() - ) - await client.update_project_settings(request) + with mock.patch.object(type(client.transport.get_rule), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(rule.Rule()) + await client.get_rule(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -14071,41 +14725,35 @@ async def test_update_project_settings_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "project_settings.name=name_value", + "name=name_value", ) in kw["metadata"] -def test_update_project_settings_flattened(): +def test_get_rule_flattened(): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.update_project_settings), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_rule), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = settings.ProjectSettings() + call.return_value = rule.Rule() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.update_project_settings( - project_settings=settings.ProjectSettings(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.get_rule( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].project_settings - mock_val = settings.ProjectSettings(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + arg = args[0].name + mock_val = "name_value" assert arg == mock_val -def test_update_project_settings_flattened_error(): +def test_get_rule_flattened_error(): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -14113,50 +14761,41 @@ def test_update_project_settings_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_project_settings( - settings.UpdateProjectSettingsRequest(), - project_settings=settings.ProjectSettings(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.get_rule( + rule.GetRuleRequest(), + name="name_value", ) @pytest.mark.asyncio -async def test_update_project_settings_flattened_async(): +async def test_get_rule_flattened_async(): client = ArtifactRegistryAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.update_project_settings), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_rule), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = settings.ProjectSettings() + call.return_value = rule.Rule() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - settings.ProjectSettings() - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(rule.Rule()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.update_project_settings( - project_settings=settings.ProjectSettings(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + response = await client.get_rule( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].project_settings - mock_val = settings.ProjectSettings(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + arg = args[0].name + mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio -async def test_update_project_settings_flattened_error_async(): +async def test_get_rule_flattened_error_async(): client = ArtifactRegistryAsyncClient( credentials=async_anonymous_credentials(), ) @@ -14164,21 +14803,20 @@ async def test_update_project_settings_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.update_project_settings( - settings.UpdateProjectSettingsRequest(), - project_settings=settings.ProjectSettings(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + await client.get_rule( + rule.GetRuleRequest(), + name="name_value", ) @pytest.mark.parametrize( "request_type", [ - vpcsc_config.GetVPCSCConfigRequest, + gda_rule.UpdateRuleRequest, dict, ], ) -def test_get_vpcsc_config(request_type, transport: str = "grpc"): +def test_update_rule(request_type, transport: str = "grpc"): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -14189,27 +14827,31 @@ def test_get_vpcsc_config(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_vpcsc_config), "__call__") as call: + with mock.patch.object(type(client.transport.update_rule), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = vpcsc_config.VPCSCConfig( + call.return_value = gda_rule.Rule( name="name_value", - vpcsc_policy=vpcsc_config.VPCSCConfig.VPCSCPolicy.DENY, + action=gda_rule.Rule.Action.ALLOW, + operation=gda_rule.Rule.Operation.DOWNLOAD, + package_id="package_id_value", ) - response = client.get_vpcsc_config(request) + response = client.update_rule(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = vpcsc_config.GetVPCSCConfigRequest() + request = gda_rule.UpdateRuleRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, vpcsc_config.VPCSCConfig) + assert isinstance(response, gda_rule.Rule) assert response.name == "name_value" - assert response.vpcsc_policy == vpcsc_config.VPCSCConfig.VPCSCPolicy.DENY + assert response.action == gda_rule.Rule.Action.ALLOW + assert response.operation == gda_rule.Rule.Operation.DOWNLOAD + assert response.package_id == "package_id_value" -def test_get_vpcsc_config_non_empty_request_with_auto_populated_field(): +def test_update_rule_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = ArtifactRegistryClient( @@ -14220,24 +14862,20 @@ def test_get_vpcsc_config_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = vpcsc_config.GetVPCSCConfigRequest( - name="name_value", - ) + request = gda_rule.UpdateRuleRequest() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_vpcsc_config), "__call__") as call: + with mock.patch.object(type(client.transport.update_rule), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.get_vpcsc_config(request=request) + client.update_rule(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == vpcsc_config.GetVPCSCConfigRequest( - name="name_value", - ) + assert args[0] == gda_rule.UpdateRuleRequest() -def test_get_vpcsc_config_use_cached_wrapped_rpc(): +def test_update_rule_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -14251,23 +14889,21 @@ def test_get_vpcsc_config_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_vpcsc_config in client._transport._wrapped_methods + assert client._transport.update_rule in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.get_vpcsc_config - ] = mock_rpc + client._transport._wrapped_methods[client._transport.update_rule] = mock_rpc request = {} - client.get_vpcsc_config(request) + client.update_rule(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_vpcsc_config(request) + client.update_rule(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -14275,7 +14911,7 @@ def test_get_vpcsc_config_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_get_vpcsc_config_async_use_cached_wrapped_rpc( +async def test_update_rule_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -14292,7 +14928,7 @@ async def test_get_vpcsc_config_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.get_vpcsc_config + client._client._transport.update_rule in client._client._transport._wrapped_methods ) @@ -14300,16 +14936,16 @@ async def test_get_vpcsc_config_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.get_vpcsc_config + client._client._transport.update_rule ] = mock_rpc request = {} - await client.get_vpcsc_config(request) + await client.update_rule(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - await client.get_vpcsc_config(request) + await client.update_rule(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -14317,8 +14953,8 @@ async def test_get_vpcsc_config_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_get_vpcsc_config_async( - transport: str = "grpc_asyncio", request_type=vpcsc_config.GetVPCSCConfigRequest +async def test_update_rule_async( + transport: str = "grpc_asyncio", request_type=gda_rule.UpdateRuleRequest ): client = ArtifactRegistryAsyncClient( credentials=async_anonymous_credentials(), @@ -14330,48 +14966,52 @@ async def test_get_vpcsc_config_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_vpcsc_config), "__call__") as call: + with mock.patch.object(type(client.transport.update_rule), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - vpcsc_config.VPCSCConfig( + gda_rule.Rule( name="name_value", - vpcsc_policy=vpcsc_config.VPCSCConfig.VPCSCPolicy.DENY, + action=gda_rule.Rule.Action.ALLOW, + operation=gda_rule.Rule.Operation.DOWNLOAD, + package_id="package_id_value", ) ) - response = await client.get_vpcsc_config(request) + response = await client.update_rule(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = vpcsc_config.GetVPCSCConfigRequest() + request = gda_rule.UpdateRuleRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, vpcsc_config.VPCSCConfig) + assert isinstance(response, gda_rule.Rule) assert response.name == "name_value" - assert response.vpcsc_policy == vpcsc_config.VPCSCConfig.VPCSCPolicy.DENY + assert response.action == gda_rule.Rule.Action.ALLOW + assert response.operation == gda_rule.Rule.Operation.DOWNLOAD + assert response.package_id == "package_id_value" @pytest.mark.asyncio -async def test_get_vpcsc_config_async_from_dict(): - await test_get_vpcsc_config_async(request_type=dict) +async def test_update_rule_async_from_dict(): + await test_update_rule_async(request_type=dict) -def test_get_vpcsc_config_field_headers(): +def test_update_rule_field_headers(): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = vpcsc_config.GetVPCSCConfigRequest() + request = gda_rule.UpdateRuleRequest() - request.name = "name_value" + request.rule.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_vpcsc_config), "__call__") as call: - call.return_value = vpcsc_config.VPCSCConfig() - client.get_vpcsc_config(request) + with mock.patch.object(type(client.transport.update_rule), "__call__") as call: + call.return_value = gda_rule.Rule() + client.update_rule(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -14382,28 +15022,26 @@ def test_get_vpcsc_config_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "rule.name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_get_vpcsc_config_field_headers_async(): +async def test_update_rule_field_headers_async(): client = ArtifactRegistryAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = vpcsc_config.GetVPCSCConfigRequest() + request = gda_rule.UpdateRuleRequest() - request.name = "name_value" + request.rule.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_vpcsc_config), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - vpcsc_config.VPCSCConfig() - ) - await client.get_vpcsc_config(request) + with mock.patch.object(type(client.transport.update_rule), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gda_rule.Rule()) + await client.update_rule(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -14414,35 +15052,39 @@ async def test_get_vpcsc_config_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "rule.name=name_value", ) in kw["metadata"] -def test_get_vpcsc_config_flattened(): +def test_update_rule_flattened(): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_vpcsc_config), "__call__") as call: + with mock.patch.object(type(client.transport.update_rule), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = vpcsc_config.VPCSCConfig() + call.return_value = gda_rule.Rule() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_vpcsc_config( - name="name_value", + client.update_rule( + rule=gda_rule.Rule(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].rule + mock_val = gda_rule.Rule(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val -def test_get_vpcsc_config_flattened_error(): +def test_update_rule_flattened_error(): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -14450,43 +15092,46 @@ def test_get_vpcsc_config_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_vpcsc_config( - vpcsc_config.GetVPCSCConfigRequest(), - name="name_value", + client.update_rule( + gda_rule.UpdateRuleRequest(), + rule=gda_rule.Rule(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio -async def test_get_vpcsc_config_flattened_async(): +async def test_update_rule_flattened_async(): client = ArtifactRegistryAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_vpcsc_config), "__call__") as call: + with mock.patch.object(type(client.transport.update_rule), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = vpcsc_config.VPCSCConfig() + call.return_value = gda_rule.Rule() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - vpcsc_config.VPCSCConfig() - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gda_rule.Rule()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_vpcsc_config( - name="name_value", + response = await client.update_rule( + rule=gda_rule.Rule(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].rule + mock_val = gda_rule.Rule(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val @pytest.mark.asyncio -async def test_get_vpcsc_config_flattened_error_async(): +async def test_update_rule_flattened_error_async(): client = ArtifactRegistryAsyncClient( credentials=async_anonymous_credentials(), ) @@ -14494,20 +15139,21 @@ async def test_get_vpcsc_config_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.get_vpcsc_config( - vpcsc_config.GetVPCSCConfigRequest(), - name="name_value", + await client.update_rule( + gda_rule.UpdateRuleRequest(), + rule=gda_rule.Rule(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.parametrize( "request_type", [ - gda_vpcsc_config.UpdateVPCSCConfigRequest, + rule.DeleteRuleRequest, dict, ], ) -def test_update_vpcsc_config(request_type, transport: str = "grpc"): +def test_delete_rule(request_type, transport: str = "grpc"): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -14518,29 +15164,22 @@ def test_update_vpcsc_config(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_vpcsc_config), "__call__" - ) as call: + with mock.patch.object(type(client.transport.delete_rule), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = gda_vpcsc_config.VPCSCConfig( - name="name_value", - vpcsc_policy=gda_vpcsc_config.VPCSCConfig.VPCSCPolicy.DENY, - ) - response = client.update_vpcsc_config(request) + call.return_value = None + response = client.delete_rule(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = gda_vpcsc_config.UpdateVPCSCConfigRequest() + request = rule.DeleteRuleRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, gda_vpcsc_config.VPCSCConfig) - assert response.name == "name_value" - assert response.vpcsc_policy == gda_vpcsc_config.VPCSCConfig.VPCSCPolicy.DENY + assert response is None -def test_update_vpcsc_config_non_empty_request_with_auto_populated_field(): +def test_delete_rule_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = ArtifactRegistryClient( @@ -14551,22 +15190,24 @@ def test_update_vpcsc_config_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = gda_vpcsc_config.UpdateVPCSCConfigRequest() + request = rule.DeleteRuleRequest( + name="name_value", + ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_vpcsc_config), "__call__" - ) as call: + with mock.patch.object(type(client.transport.delete_rule), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.update_vpcsc_config(request=request) + client.delete_rule(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == gda_vpcsc_config.UpdateVPCSCConfigRequest() + assert args[0] == rule.DeleteRuleRequest( + name="name_value", + ) -def test_update_vpcsc_config_use_cached_wrapped_rpc(): +def test_delete_rule_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -14580,25 +15221,21 @@ def test_update_vpcsc_config_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.update_vpcsc_config in client._transport._wrapped_methods - ) + assert client._transport.delete_rule in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.update_vpcsc_config - ] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_rule] = mock_rpc request = {} - client.update_vpcsc_config(request) + client.delete_rule(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.update_vpcsc_config(request) + client.delete_rule(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -14606,7 +15243,7 @@ def test_update_vpcsc_config_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_update_vpcsc_config_async_use_cached_wrapped_rpc( +async def test_delete_rule_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -14623,7 +15260,7 @@ async def test_update_vpcsc_config_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.update_vpcsc_config + client._client._transport.delete_rule in client._client._transport._wrapped_methods ) @@ -14631,16 +15268,16 @@ async def test_update_vpcsc_config_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.update_vpcsc_config + client._client._transport.delete_rule ] = mock_rpc request = {} - await client.update_vpcsc_config(request) + await client.delete_rule(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - await client.update_vpcsc_config(request) + await client.delete_rule(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -14648,9 +15285,8 @@ async def test_update_vpcsc_config_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_update_vpcsc_config_async( - transport: str = "grpc_asyncio", - request_type=gda_vpcsc_config.UpdateVPCSCConfigRequest, +async def test_delete_rule_async( + transport: str = "grpc_asyncio", request_type=rule.DeleteRuleRequest ): client = ArtifactRegistryAsyncClient( credentials=async_anonymous_credentials(), @@ -14662,52 +15298,41 @@ async def test_update_vpcsc_config_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_vpcsc_config), "__call__" - ) as call: + with mock.patch.object(type(client.transport.delete_rule), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - gda_vpcsc_config.VPCSCConfig( - name="name_value", - vpcsc_policy=gda_vpcsc_config.VPCSCConfig.VPCSCPolicy.DENY, - ) - ) - response = await client.update_vpcsc_config(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_rule(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = gda_vpcsc_config.UpdateVPCSCConfigRequest() + request = rule.DeleteRuleRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, gda_vpcsc_config.VPCSCConfig) - assert response.name == "name_value" - assert response.vpcsc_policy == gda_vpcsc_config.VPCSCConfig.VPCSCPolicy.DENY + assert response is None @pytest.mark.asyncio -async def test_update_vpcsc_config_async_from_dict(): - await test_update_vpcsc_config_async(request_type=dict) +async def test_delete_rule_async_from_dict(): + await test_delete_rule_async(request_type=dict) -def test_update_vpcsc_config_field_headers(): +def test_delete_rule_field_headers(): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = gda_vpcsc_config.UpdateVPCSCConfigRequest() + request = rule.DeleteRuleRequest() - request.vpcsc_config.name = "name_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_vpcsc_config), "__call__" - ) as call: - call.return_value = gda_vpcsc_config.VPCSCConfig() - client.update_vpcsc_config(request) + with mock.patch.object(type(client.transport.delete_rule), "__call__") as call: + call.return_value = None + client.delete_rule(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -14718,30 +15343,26 @@ def test_update_vpcsc_config_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "vpcsc_config.name=name_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_update_vpcsc_config_field_headers_async(): +async def test_delete_rule_field_headers_async(): client = ArtifactRegistryAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = gda_vpcsc_config.UpdateVPCSCConfigRequest() + request = rule.DeleteRuleRequest() - request.vpcsc_config.name = "name_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_vpcsc_config), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - gda_vpcsc_config.VPCSCConfig() - ) - await client.update_vpcsc_config(request) + with mock.patch.object(type(client.transport.delete_rule), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_rule(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -14752,41 +15373,35 @@ async def test_update_vpcsc_config_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "vpcsc_config.name=name_value", + "name=name_value", ) in kw["metadata"] -def test_update_vpcsc_config_flattened(): +def test_delete_rule_flattened(): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_vpcsc_config), "__call__" - ) as call: + with mock.patch.object(type(client.transport.delete_rule), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = gda_vpcsc_config.VPCSCConfig() + call.return_value = None # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.update_vpcsc_config( - vpcsc_config=gda_vpcsc_config.VPCSCConfig(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.delete_rule( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].vpcsc_config - mock_val = gda_vpcsc_config.VPCSCConfig(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + arg = args[0].name + mock_val = "name_value" assert arg == mock_val -def test_update_vpcsc_config_flattened_error(): +def test_delete_rule_flattened_error(): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -14794,50 +15409,41 @@ def test_update_vpcsc_config_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_vpcsc_config( - gda_vpcsc_config.UpdateVPCSCConfigRequest(), - vpcsc_config=gda_vpcsc_config.VPCSCConfig(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.delete_rule( + rule.DeleteRuleRequest(), + name="name_value", ) @pytest.mark.asyncio -async def test_update_vpcsc_config_flattened_async(): +async def test_delete_rule_flattened_async(): client = ArtifactRegistryAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_vpcsc_config), "__call__" - ) as call: + with mock.patch.object(type(client.transport.delete_rule), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = gda_vpcsc_config.VPCSCConfig() + call.return_value = None - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - gda_vpcsc_config.VPCSCConfig() - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.update_vpcsc_config( - vpcsc_config=gda_vpcsc_config.VPCSCConfig(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + response = await client.delete_rule( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].vpcsc_config - mock_val = gda_vpcsc_config.VPCSCConfig(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + arg = args[0].name + mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio -async def test_update_vpcsc_config_flattened_error_async(): +async def test_delete_rule_flattened_error_async(): client = ArtifactRegistryAsyncClient( credentials=async_anonymous_credentials(), ) @@ -14845,20 +15451,85 @@ async def test_update_vpcsc_config_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.update_vpcsc_config( - gda_vpcsc_config.UpdateVPCSCConfigRequest(), - vpcsc_config=gda_vpcsc_config.VPCSCConfig(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + await client.delete_rule( + rule.DeleteRuleRequest(), + name="name_value", ) -def test_list_docker_images_rest_use_cached_wrapped_rpc(): +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.SetIamPolicyRequest, + dict, + ], +) +def test_set_iam_policy(request_type, transport: str = "grpc"): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + response = client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = iam_policy_pb2.SetIamPolicyRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + assert response.version == 774 + assert response.etag == b"etag_blob" + + +def test_set_iam_policy_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = iam_policy_pb2.SetIamPolicyRequest( + resource="resource_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.set_iam_policy(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.SetIamPolicyRequest( + resource="resource_value", + ) + + +def test_set_iam_policy_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) # Should wrap all calls on client creation @@ -14866,261 +15537,260 @@ def test_list_docker_images_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.list_docker_images in client._transport._wrapped_methods - ) + assert client._transport.set_iam_policy in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.list_docker_images - ] = mock_rpc - + client._transport._wrapped_methods[client._transport.set_iam_policy] = mock_rpc request = {} - client.list_docker_images(request) + client.set_iam_policy(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_docker_images(request) + client.set_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_docker_images_rest_required_fields( - request_type=artifact.ListDockerImagesRequest, +@pytest.mark.asyncio +async def test_set_iam_policy_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", ): - transport_class = transports.ArtifactRegistryRestTransport + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - # verify fields with default values are dropped + # Ensure method has been cached + assert ( + client._client._transport.set_iam_policy + in client._client._transport._wrapped_methods + ) - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_docker_images._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.set_iam_policy + ] = mock_rpc - # verify required fields with default values are now present + request = {} + await client.set_iam_policy(request) - jsonified_request["parent"] = "parent_value" + # Establish that the underlying gRPC stub method was 
called. + assert mock_rpc.call_count == 1 - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_docker_images._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "order_by", - "page_size", - "page_token", - ) - ) - jsonified_request.update(unset_fields) + await client.set_iam_policy(request) - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 - client = ArtifactRegistryClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + +@pytest.mark.asyncio +async def test_set_iam_policy_async( + transport: str = "grpc_asyncio", request_type=iam_policy_pb2.SetIamPolicyRequest +): + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, ) - request = request_type(**request_init) - # Designate an appropriate value for the returned response. - return_value = artifact.ListDockerImagesResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - response_value = Response() - response_value.status_code = 200 + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + ) + response = await client.set_iam_policy(request) - # Convert return value to protobuf type - return_value = artifact.ListDockerImagesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = iam_policy_pb2.SetIamPolicyRequest() + assert args[0] == request - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value + # Establish that the response is the type that we expect. 
+ assert isinstance(response, policy_pb2.Policy) + assert response.version == 774 + assert response.etag == b"etag_blob" - response = client.list_docker_images(request) - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params +@pytest.mark.asyncio +async def test_set_iam_policy_async_from_dict(): + await test_set_iam_policy_async(request_type=dict) -def test_list_docker_images_rest_unset_required_fields(): - transport = transports.ArtifactRegistryRestTransport( - credentials=ga_credentials.AnonymousCredentials +def test_set_iam_policy_field_headers(): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), ) - unset_fields = transport.list_docker_images._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "orderBy", - "pageSize", - "pageToken", - ) - ) - & set(("parent",)) - ) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.SetIamPolicyRequest() + request.resource = "resource_value" -def test_list_docker_images_rest_flattened(): - client = ArtifactRegistryClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + call.return_value = policy_pb2.Policy() + client.set_iam_policy(request) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = artifact.ListDockerImagesResponse() + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request - # get arguments that satisfy an http rule for this method - sample_request = { - "parent": "projects/sample1/locations/sample2/repositories/sample3" - } + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource_value", + ) in kw["metadata"] - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - ) - mock_args.update(sample_request) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = artifact.ListDockerImagesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value +@pytest.mark.asyncio +async def test_set_iam_policy_field_headers_async(): + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), + ) - client.list_docker_images(**mock_args) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.SetIamPolicyRequest() - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*/repositories/*}/dockerImages" - % client.transport._host, - args[1], - ) + request.resource = "resource_value" + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + await client.set_iam_policy(request) -def test_list_docker_images_rest_flattened_error(transport: str = "rest"): - client = ArtifactRegistryClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_docker_images( - artifact.ListDockerImagesRequest(), - parent="parent_value", - ) + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource_value", + ) in kw["metadata"] -def test_list_docker_images_rest_pager(transport: str = "rest"): +def test_set_iam_policy_from_dict_foreign(): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + response = client.set_iam_policy( + request={ + "resource": "resource_value", + "policy": policy_pb2.Policy(version=774), + "update_mask": field_mask_pb2.FieldMask(paths=["paths_value"]), + } + ) + call.assert_called() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - artifact.ListDockerImagesResponse( - docker_images=[ - artifact.DockerImage(), - artifact.DockerImage(), - artifact.DockerImage(), - ], - next_page_token="abc", - ), - artifact.ListDockerImagesResponse( - docker_images=[], - next_page_token="def", - ), - artifact.ListDockerImagesResponse( - docker_images=[ - artifact.DockerImage(), - ], - next_page_token="ghi", - ), - artifact.ListDockerImagesResponse( - docker_images=[ - artifact.DockerImage(), - artifact.DockerImage(), - ], - ), + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.GetIamPolicyRequest, + dict, + ], +) +def test_get_iam_policy(request_type, transport: str = "grpc"): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy( + version=774, + etag=b"etag_blob", ) - # Two responses for two calls - response = response + response + response = client.get_iam_policy(request) - # Wrap the values into proper Response objs - response = tuple(artifact.ListDockerImagesResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = iam_policy_pb2.GetIamPolicyRequest() + assert args[0] == request - sample_request = { - "parent": "projects/sample1/locations/sample2/repositories/sample3" - } + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + assert response.version == 774 + assert response.etag == b"etag_blob" - pager = client.list_docker_images(request=sample_request) - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, artifact.DockerImage) for i in results) +def test_get_iam_policy_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) - pages = list(client.list_docker_images(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = iam_policy_pb2.GetIamPolicyRequest( + resource="resource_value", + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.get_iam_policy(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.GetIamPolicyRequest( + resource="resource_value", + ) -def test_get_docker_image_rest_use_cached_wrapped_rpc(): + +def test_get_iam_policy_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) # Should wrap all calls on client creation @@ -15128,179 +15798,261 @@ def test_get_docker_image_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_docker_image in client._transport._wrapped_methods + assert client._transport.get_iam_policy in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.get_docker_image + client._transport._wrapped_methods[client._transport.get_iam_policy] = mock_rpc + request = {} + client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_iam_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_iam_policy_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_iam_policy + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_iam_policy ] = mock_rpc request = {} - client.get_docker_image(request) + await client.get_iam_policy(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_docker_image(request) + await client.get_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_docker_image_rest_required_fields( - request_type=artifact.GetDockerImageRequest, +@pytest.mark.asyncio +async def test_get_iam_policy_async( + transport: str = "grpc_asyncio", request_type=iam_policy_pb2.GetIamPolicyRequest ): - transport_class = transports.ArtifactRegistryRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, ) - # verify fields with default values are dropped + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_docker_image._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + ) + response = await client.get_iam_policy(request) - # verify required fields with default values are now present + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = iam_policy_pb2.GetIamPolicyRequest() + assert args[0] == request - jsonified_request["name"] = "name_value" + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + assert response.version == 774 + assert response.etag == b"etag_blob" - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_docker_image._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" +@pytest.mark.asyncio +async def test_get_iam_policy_async_from_dict(): + await test_get_iam_policy_async(request_type=dict) + +def test_get_iam_policy_field_headers(): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = artifact.DockerImage() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - response_value = Response() - response_value.status_code = 200 + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = iam_policy_pb2.GetIamPolicyRequest() - # Convert return value to protobuf type - return_value = artifact.DockerImage.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + request.resource = "resource_value" - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + call.return_value = policy_pb2.Policy() + client.get_iam_policy(request) - response = client.get_docker_image(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource_value", + ) in kw["metadata"] -def test_get_docker_image_rest_unset_required_fields(): - transport = transports.ArtifactRegistryRestTransport( - credentials=ga_credentials.AnonymousCredentials +@pytest.mark.asyncio +async def test_get_iam_policy_field_headers_async(): + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), ) - unset_fields = transport.get_docker_image._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.GetIamPolicyRequest() + + request.resource = "resource_value" + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + await client.get_iam_policy(request) -def test_get_docker_image_rest_flattened(): + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource_value", + ) in kw["metadata"] + + +def test_get_iam_policy_from_dict_foreign(): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + response = client.get_iam_policy( + request={ + "resource": "resource_value", + "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), + } + ) + call.assert_called() - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = artifact.DockerImage() - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/repositories/sample3/dockerImages/sample4" - } +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.TestIamPermissionsRequest, + dict, + ], +) +def test_test_iam_permissions(request_type, transport: str = "grpc"): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = artifact.DockerImage.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse( + permissions=["permissions_value"], + ) + response = client.test_iam_permissions(request) - client.get_docker_image(**mock_args) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = iam_policy_pb2.TestIamPermissionsRequest() + assert args[0] == request - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/repositories/*/dockerImages/*}" - % client.transport._host, - args[1], - ) + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + assert response.permissions == ["permissions_value"] -def test_get_docker_image_rest_flattened_error(transport: str = "rest"): +def test_test_iam_permissions_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="grpc", ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_docker_image( - artifact.GetDockerImageRequest(), - name="name_value", + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = iam_policy_pb2.TestIamPermissionsRequest( + resource="resource_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.test_iam_permissions(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.TestIamPermissionsRequest( + resource="resource_value", ) -def test_list_maven_artifacts_rest_use_cached_wrapped_rpc(): +def test_test_iam_permissions_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) # Should wrap all calls on client creation @@ -15309,7 +16061,7 @@ def test_list_maven_artifacts_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.list_maven_artifacts in client._transport._wrapped_methods + client._transport.test_iam_permissions in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -15318,251 +16070,271 @@ def test_list_maven_artifacts_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.list_maven_artifacts + client._transport.test_iam_permissions ] = mock_rpc - request = {} - client.list_maven_artifacts(request) + client.test_iam_permissions(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_maven_artifacts(request) + client.test_iam_permissions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_maven_artifacts_rest_required_fields( - request_type=artifact.ListMavenArtifactsRequest, +@pytest.mark.asyncio +async def test_test_iam_permissions_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", ): - transport_class = transports.ArtifactRegistryRestTransport + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - # verify fields with default values are dropped + # Ensure method has been cached + assert ( + client._client._transport.test_iam_permissions + in client._client._transport._wrapped_methods + ) - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_maven_artifacts._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.test_iam_permissions + ] = mock_rpc - # verify required fields with default values are now present + request = {} + await client.test_iam_permissions(request) - jsonified_request["parent"] = "parent_value" + # Establish 
that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_maven_artifacts._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "page_size", - "page_token", - ) - ) - jsonified_request.update(unset_fields) + await client.test_iam_permissions(request) - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 - client = ArtifactRegistryClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + +@pytest.mark.asyncio +async def test_test_iam_permissions_async( + transport: str = "grpc_asyncio", + request_type=iam_policy_pb2.TestIamPermissionsRequest, +): + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, ) - request = request_type(**request_init) - # Designate an appropriate value for the returned response. - return_value = artifact.ListMavenArtifactsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - response_value = Response() - response_value.status_code = 200 + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse( + permissions=["permissions_value"], + ) + ) + response = await client.test_iam_permissions(request) - # Convert return value to protobuf type - return_value = artifact.ListMavenArtifactsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = iam_policy_pb2.TestIamPermissionsRequest() + assert args[0] == request - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value + # Establish that the response is the type that we expect. 
+ assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + assert response.permissions == ["permissions_value"] - response = client.list_maven_artifacts(request) - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params +@pytest.mark.asyncio +async def test_test_iam_permissions_async_from_dict(): + await test_test_iam_permissions_async(request_type=dict) -def test_list_maven_artifacts_rest_unset_required_fields(): - transport = transports.ArtifactRegistryRestTransport( - credentials=ga_credentials.AnonymousCredentials +def test_test_iam_permissions_field_headers(): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), ) - unset_fields = transport.list_maven_artifacts._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "pageSize", - "pageToken", - ) - ) - & set(("parent",)) - ) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.TestIamPermissionsRequest() + request.resource = "resource_value" -def test_list_maven_artifacts_rest_flattened(): - client = ArtifactRegistryClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + client.test_iam_permissions(request) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = artifact.ListMavenArtifactsResponse() + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request - # get arguments that satisfy an http rule for this method - sample_request = { - "parent": "projects/sample1/locations/sample2/repositories/sample3" - } + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource_value", + ) in kw["metadata"] - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - ) - mock_args.update(sample_request) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = artifact.ListMavenArtifactsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value +@pytest.mark.asyncio +async def test_test_iam_permissions_field_headers_async(): + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), + ) - client.list_maven_artifacts(**mock_args) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.TestIamPermissionsRequest() - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*/repositories/*}/mavenArtifacts" - % client.transport._host, - args[1], + request.resource = "resource_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse() ) + await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource_value", + ) in kw["metadata"] -def test_list_maven_artifacts_rest_flattened_error(transport: str = "rest"): + +def test_test_iam_permissions_from_dict_foreign(): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_maven_artifacts( - artifact.ListMavenArtifactsRequest(), - parent="parent_value", + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + response = client.test_iam_permissions( + request={ + "resource": "resource_value", + "permissions": ["permissions_value"], + } ) + call.assert_called() -def test_list_maven_artifacts_rest_pager(transport: str = "rest"): +@pytest.mark.parametrize( + "request_type", + [ + settings.GetProjectSettingsRequest, + dict, + ], +) +def test_get_project_settings(request_type, transport: str = "grpc"): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - artifact.ListMavenArtifactsResponse( - maven_artifacts=[ - artifact.MavenArtifact(), - artifact.MavenArtifact(), - artifact.MavenArtifact(), - ], - next_page_token="abc", - ), - artifact.ListMavenArtifactsResponse( - maven_artifacts=[], - next_page_token="def", - ), - artifact.ListMavenArtifactsResponse( - maven_artifacts=[ - artifact.MavenArtifact(), - ], - next_page_token="ghi", - ), - artifact.ListMavenArtifactsResponse( - maven_artifacts=[ - artifact.MavenArtifact(), - artifact.MavenArtifact(), - ], - ), - ) - # Two responses for two calls - response = response + response + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - # Wrap the values into proper Response objs - response = tuple( - artifact.ListMavenArtifactsResponse.to_json(x) for x in response + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_project_settings), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = settings.ProjectSettings( + name="name_value", + legacy_redirection_state=settings.ProjectSettings.RedirectionState.REDIRECTION_FROM_GCR_IO_DISABLED, + pull_percent=1293, ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values + response = client.get_project_settings(request) - sample_request = { - "parent": "projects/sample1/locations/sample2/repositories/sample3" - } + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = settings.GetProjectSettingsRequest() + assert args[0] == request - pager = client.list_maven_artifacts(request=sample_request) + # Establish that the response is the type that we expect. + assert isinstance(response, settings.ProjectSettings) + assert response.name == "name_value" + assert ( + response.legacy_redirection_state + == settings.ProjectSettings.RedirectionState.REDIRECTION_FROM_GCR_IO_DISABLED + ) + assert response.pull_percent == 1293 - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, artifact.MavenArtifact) for i in results) - pages = list(client.list_maven_artifacts(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token +def test_get_project_settings_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. 
+ request = settings.GetProjectSettingsRequest( + name="name_value", + ) -def test_get_maven_artifact_rest_use_cached_wrapped_rpc(): + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_project_settings), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_project_settings(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == settings.GetProjectSettingsRequest( + name="name_value", + ) + + +def test_get_project_settings_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) # Should wrap all calls on client creation @@ -15571,7 +16343,7 @@ def test_get_maven_artifact_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.get_maven_artifact in client._transport._wrapped_methods + client._transport.get_project_settings in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -15580,429 +16352,340 @@ def test_get_maven_artifact_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.get_maven_artifact + client._transport.get_project_settings ] = mock_rpc - request = {} - client.get_maven_artifact(request) + client.get_project_settings(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_maven_artifact(request) + client.get_project_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_maven_artifact_rest_required_fields( - request_type=artifact.GetMavenArtifactRequest, +@pytest.mark.asyncio +async def test_get_project_settings_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", ): - transport_class = transports.ArtifactRegistryRestTransport + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - # verify fields with default values are dropped + # Ensure method has been cached + assert ( + client._client._transport.get_project_settings + in client._client._transport._wrapped_methods + ) - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_maven_artifact._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_project_settings + ] = mock_rpc - # verify required fields with default values are now present + request = {} + await client.get_project_settings(request) - jsonified_request["name"] = "name_value" + # Establish that the 
underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_maven_artifact._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + await client.get_project_settings(request) - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 - client = ArtifactRegistryClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + +@pytest.mark.asyncio +async def test_get_project_settings_async( + transport: str = "grpc_asyncio", request_type=settings.GetProjectSettingsRequest +): + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, ) - request = request_type(**request_init) - # Designate an appropriate value for the returned response. - return_value = artifact.MavenArtifact() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() - response_value = Response() - response_value.status_code = 200 + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_project_settings), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + settings.ProjectSettings( + name="name_value", + legacy_redirection_state=settings.ProjectSettings.RedirectionState.REDIRECTION_FROM_GCR_IO_DISABLED, + pull_percent=1293, + ) + ) + response = await client.get_project_settings(request) - # Convert return value to protobuf type - return_value = artifact.MavenArtifact.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = settings.GetProjectSettingsRequest() + assert args[0] == request - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value + # Establish that the response is the type that we expect. 
+ assert isinstance(response, settings.ProjectSettings) + assert response.name == "name_value" + assert ( + response.legacy_redirection_state + == settings.ProjectSettings.RedirectionState.REDIRECTION_FROM_GCR_IO_DISABLED + ) + assert response.pull_percent == 1293 - response = client.get_maven_artifact(request) - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params +@pytest.mark.asyncio +async def test_get_project_settings_async_from_dict(): + await test_get_project_settings_async(request_type=dict) -def test_get_maven_artifact_rest_unset_required_fields(): - transport = transports.ArtifactRegistryRestTransport( - credentials=ga_credentials.AnonymousCredentials +def test_get_project_settings_field_headers(): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), ) - unset_fields = transport.get_maven_artifact._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = settings.GetProjectSettingsRequest() + request.name = "name_value" -def test_get_maven_artifact_rest_flattened(): - client = ArtifactRegistryClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_project_settings), "__call__" + ) as call: + call.return_value = settings.ProjectSettings() + client.get_project_settings(request) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = artifact.MavenArtifact() + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/repositories/sample3/mavenArtifacts/sample4" - } + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = artifact.MavenArtifact.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value +@pytest.mark.asyncio +async def test_get_project_settings_field_headers_async(): + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), + ) - client.get_maven_artifact(**mock_args) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = settings.GetProjectSettingsRequest() - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/repositories/*/mavenArtifacts/*}" - % client.transport._host, - args[1], + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_project_settings), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + settings.ProjectSettings() ) + await client.get_project_settings(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request -def test_get_maven_artifact_rest_flattened_error(transport: str = "rest"): + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_project_settings_flattened(): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_maven_artifact( - artifact.GetMavenArtifactRequest(), - name="name_value", - ) - - -def test_list_npm_packages_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ArtifactRegistryClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_npm_packages in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_project_settings), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = settings.ProjectSettings() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_project_settings( + name="name_value", ) - client._transport._wrapped_methods[ - client._transport.list_npm_packages - ] = mock_rpc - - request = {} - client.list_npm_packages(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_npm_packages(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_npm_packages_rest_required_fields( - request_type=artifact.ListNpmPackagesRequest, -): - transport_class = transports.ArtifactRegistryRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_npm_packages._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = "parent_value" - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_npm_packages._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "page_size", - "page_token", - ) - ) - jsonified_request.update(unset_fields) + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" +def test_get_project_settings_flattened_error(): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = artifact.ListNpmPackagesResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = artifact.ListNpmPackagesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.list_npm_packages(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_list_npm_packages_rest_unset_required_fields(): - transport = transports.ArtifactRegistryRestTransport( - credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_npm_packages._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "pageSize", - "pageToken", - ) + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_project_settings( + settings.GetProjectSettingsRequest(), + name="name_value", ) - & set(("parent",)) - ) -def test_list_npm_packages_rest_flattened(): - client = ArtifactRegistryClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", +@pytest.mark.asyncio +async def test_get_project_settings_flattened_async(): + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = artifact.ListNpmPackagesResponse() - - # get arguments that satisfy an http rule for this method - sample_request = { - "parent": "projects/sample1/locations/sample2/repositories/sample3" - } + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_project_settings), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = settings.ProjectSettings() - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + settings.ProjectSettings() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_project_settings( + name="name_value", ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = artifact.ListNpmPackagesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.list_npm_packages(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*/repositories/*}/npmPackages" - % client.transport._host, - args[1], - ) + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val -def test_list_npm_packages_rest_flattened_error(transport: str = "rest"): - client = ArtifactRegistryClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, +@pytest.mark.asyncio +async def test_get_project_settings_flattened_error_async(): + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_npm_packages( - artifact.ListNpmPackagesRequest(), - parent="parent_value", + await client.get_project_settings( + settings.GetProjectSettingsRequest(), + name="name_value", ) -def test_list_npm_packages_rest_pager(transport: str = "rest"): +@pytest.mark.parametrize( + "request_type", + [ + settings.UpdateProjectSettingsRequest, + dict, + ], +) +def test_update_project_settings(request_type, transport: str = "grpc"): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - artifact.ListNpmPackagesResponse( - npm_packages=[ - artifact.NpmPackage(), - artifact.NpmPackage(), - artifact.NpmPackage(), - ], - next_page_token="abc", - ), - artifact.ListNpmPackagesResponse( - npm_packages=[], - next_page_token="def", - ), - artifact.ListNpmPackagesResponse( - npm_packages=[ - artifact.NpmPackage(), - ], - next_page_token="ghi", - ), - artifact.ListNpmPackagesResponse( - npm_packages=[ - artifact.NpmPackage(), - artifact.NpmPackage(), - ], - ), + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_project_settings), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = settings.ProjectSettings( + name="name_value", + legacy_redirection_state=settings.ProjectSettings.RedirectionState.REDIRECTION_FROM_GCR_IO_DISABLED, + pull_percent=1293, ) - # Two responses for two calls - response = response + response + response = client.update_project_settings(request) - # Wrap the values into proper Response objs - response = tuple(artifact.ListNpmPackagesResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = settings.UpdateProjectSettingsRequest() + assert args[0] == request - sample_request = { - "parent": "projects/sample1/locations/sample2/repositories/sample3" - } + # Establish that the response is the type that we expect. + assert isinstance(response, settings.ProjectSettings) + assert response.name == "name_value" + assert ( + response.legacy_redirection_state + == settings.ProjectSettings.RedirectionState.REDIRECTION_FROM_GCR_IO_DISABLED + ) + assert response.pull_percent == 1293 - pager = client.list_npm_packages(request=sample_request) - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, artifact.NpmPackage) for i in results) +def test_update_project_settings_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) - pages = list(client.list_npm_packages(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = settings.UpdateProjectSettingsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_project_settings), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.update_project_settings(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == settings.UpdateProjectSettingsRequest() -def test_get_npm_package_rest_use_cached_wrapped_rpc(): +def test_update_project_settings_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) # Should wrap all calls on client creation @@ -16010,439 +16693,356 @@ def test_get_npm_package_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_npm_package in client._transport._wrapped_methods + assert ( + client._transport.update_project_settings + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_npm_package] = mock_rpc - + client._transport._wrapped_methods[ + client._transport.update_project_settings + ] = mock_rpc request = {} - client.get_npm_package(request) + client.update_project_settings(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_npm_package(request) + client.update_project_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_npm_package_rest_required_fields( - request_type=artifact.GetNpmPackageRequest, +@pytest.mark.asyncio +async def test_update_project_settings_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", ): - transport_class = transports.ArtifactRegistryRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_npm_package._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) - # verify required fields with default values are now present + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - jsonified_request["name"] = "name_value" + # Ensure method has been cached + assert ( + client._client._transport.update_project_settings + in client._client._transport._wrapped_methods + ) - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_npm_package._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + 
client._client._transport._wrapped_methods[ + client._client._transport.update_project_settings + ] = mock_rpc - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + request = {} + await client.update_project_settings(request) - client = ArtifactRegistryClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 - # Designate an appropriate value for the returned response. - return_value = artifact.NpmPackage() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result + await client.update_project_settings(request) - response_value = Response() - response_value.status_code = 200 + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 - # Convert return value to protobuf type - return_value = artifact.NpmPackage.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value +@pytest.mark.asyncio +async def test_update_project_settings_async( + transport: str = "grpc_asyncio", request_type=settings.UpdateProjectSettingsRequest +): + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) - response = client.get_npm_package(request) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_project_settings), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + settings.ProjectSettings( + name="name_value", + legacy_redirection_state=settings.ProjectSettings.RedirectionState.REDIRECTION_FROM_GCR_IO_DISABLED, + pull_percent=1293, + ) + ) + response = await client.update_project_settings(request) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = settings.UpdateProjectSettingsRequest() + assert args[0] == request -def test_get_npm_package_rest_unset_required_fields(): - transport = transports.ArtifactRegistryRestTransport( - credentials=ga_credentials.AnonymousCredentials + # Establish that the response is the type that we expect. + assert isinstance(response, settings.ProjectSettings) + assert response.name == "name_value" + assert ( + response.legacy_redirection_state + == settings.ProjectSettings.RedirectionState.REDIRECTION_FROM_GCR_IO_DISABLED ) + assert response.pull_percent == 1293 - unset_fields = transport.get_npm_package._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) +@pytest.mark.asyncio +async def test_update_project_settings_async_from_dict(): + await test_update_project_settings_async(request_type=dict) -def test_get_npm_package_rest_flattened(): + +def test_update_project_settings_field_headers(): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = artifact.NpmPackage() + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = settings.UpdateProjectSettingsRequest() - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/repositories/sample3/npmPackages/sample4" - } + request.project_settings.name = "name_value" - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_project_settings), "__call__" + ) as call: + call.return_value = settings.ProjectSettings() + client.update_project_settings(request) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = artifact.NpmPackage.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.get_npm_package(**mock_args) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/repositories/*/npmPackages/*}" - % client.transport._host, - args[1], - ) + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "project_settings.name=name_value", + ) in kw["metadata"] -def test_get_npm_package_rest_flattened_error(transport: str = "rest"): - client = ArtifactRegistryClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, +@pytest.mark.asyncio +async def test_update_project_settings_field_headers_async(): + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), ) - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_npm_package( - artifact.GetNpmPackageRequest(), - name="name_value", - ) - - -def test_list_python_packages_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ArtifactRegistryClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = settings.UpdateProjectSettingsRequest() - # Ensure method has been cached - assert ( - client._transport.list_python_packages in client._transport._wrapped_methods - ) + request.project_settings.name = "name_value" - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_project_settings), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + settings.ProjectSettings() ) - client._transport._wrapped_methods[ - client._transport.list_python_packages - ] = mock_rpc - - request = {} - client.list_python_packages(request) + await client.update_project_settings(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.list_python_packages(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_python_packages_rest_required_fields( - request_type=artifact.ListPythonPackagesRequest, -): - transport_class = transports.ArtifactRegistryRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_python_packages._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = "parent_value" + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_python_packages._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "page_size", - "page_token", - ) - ) - jsonified_request.update(unset_fields) + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "project_settings.name=name_value", + ) in kw["metadata"] - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" +def test_update_project_settings_flattened(): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = artifact.ListPythonPackagesResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = artifact.ListPythonPackagesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.list_python_packages(request) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_project_settings), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = settings.ProjectSettings() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_project_settings( + project_settings=settings.ProjectSettings(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].project_settings + mock_val = settings.ProjectSettings(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val -def test_list_python_packages_rest_unset_required_fields(): - transport = transports.ArtifactRegistryRestTransport( - credentials=ga_credentials.AnonymousCredentials +def test_update_project_settings_flattened_error(): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), ) - unset_fields = transport.list_python_packages._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "pageSize", - "pageToken", - ) + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_project_settings( + settings.UpdateProjectSettingsRequest(), + project_settings=settings.ProjectSettings(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) - & set(("parent",)) - ) -def test_list_python_packages_rest_flattened(): - client = ArtifactRegistryClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", +@pytest.mark.asyncio +async def test_update_project_settings_flattened_async(): + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = artifact.ListPythonPackagesResponse() - - # get arguments that satisfy an http rule for this method - sample_request = { - "parent": "projects/sample1/locations/sample2/repositories/sample3" - } + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_project_settings), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = settings.ProjectSettings() - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + settings.ProjectSettings() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.update_project_settings( + project_settings=settings.ProjectSettings(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = artifact.ListPythonPackagesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.list_python_packages(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*/repositories/*}/pythonPackages" - % client.transport._host, - args[1], - ) + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].project_settings + mock_val = settings.ProjectSettings(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val -def test_list_python_packages_rest_flattened_error(transport: str = "rest"): - client = ArtifactRegistryClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, +@pytest.mark.asyncio +async def test_update_project_settings_flattened_error_async(): + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.list_python_packages( - artifact.ListPythonPackagesRequest(), - parent="parent_value", + await client.update_project_settings( + settings.UpdateProjectSettingsRequest(), + project_settings=settings.ProjectSettings(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_list_python_packages_rest_pager(transport: str = "rest"): +@pytest.mark.parametrize( + "request_type", + [ + vpcsc_config.GetVPCSCConfigRequest, + dict, + ], +) +def test_get_vpcsc_config(request_type, transport: str = "grpc"): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - artifact.ListPythonPackagesResponse( - python_packages=[ - artifact.PythonPackage(), - artifact.PythonPackage(), - artifact.PythonPackage(), - ], - next_page_token="abc", - ), - artifact.ListPythonPackagesResponse( - python_packages=[], - next_page_token="def", - ), - artifact.ListPythonPackagesResponse( - python_packages=[ - artifact.PythonPackage(), - ], - next_page_token="ghi", - ), - artifact.ListPythonPackagesResponse( - python_packages=[ - artifact.PythonPackage(), - artifact.PythonPackage(), - ], - ), - ) - # Two responses for two calls - response = response + response + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - # Wrap the values into proper Response objs - response = tuple( - artifact.ListPythonPackagesResponse.to_json(x) for x in response + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_vpcsc_config), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = vpcsc_config.VPCSCConfig( + name="name_value", + vpcsc_policy=vpcsc_config.VPCSCConfig.VPCSCPolicy.DENY, ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values + response = client.get_vpcsc_config(request) - sample_request = { - "parent": "projects/sample1/locations/sample2/repositories/sample3" - } + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = vpcsc_config.GetVPCSCConfigRequest() + assert args[0] == request - pager = client.list_python_packages(request=sample_request) + # Establish that the response is the type that we expect. + assert isinstance(response, vpcsc_config.VPCSCConfig) + assert response.name == "name_value" + assert response.vpcsc_policy == vpcsc_config.VPCSCConfig.VPCSCPolicy.DENY - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, artifact.PythonPackage) for i in results) - pages = list(client.list_python_packages(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token +def test_get_vpcsc_config_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. 
+ request = vpcsc_config.GetVPCSCConfigRequest( + name="name_value", + ) -def test_get_python_package_rest_use_cached_wrapped_rpc(): + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_vpcsc_config), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_vpcsc_config(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == vpcsc_config.GetVPCSCConfigRequest( + name="name_value", + ) + + +def test_get_vpcsc_config_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) # Should wrap all calls on client creation @@ -16450,9 +17050,7 @@ def test_get_python_package_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.get_python_package in client._transport._wrapped_methods - ) + assert client._transport.get_vpcsc_config in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -16460,215 +17058,320 @@ def test_get_python_package_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.get_python_package + client._transport.get_vpcsc_config ] = mock_rpc - request = {} - client.get_python_package(request) + client.get_vpcsc_config(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_python_package(request) + client.get_vpcsc_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_python_package_rest_required_fields( - request_type=artifact.GetPythonPackageRequest, +@pytest.mark.asyncio +async def test_get_vpcsc_config_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", ): - transport_class = transports.ArtifactRegistryRestTransport + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_vpcsc_config + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_vpcsc_config + ] = mock_rpc + + request = {} + await client.get_vpcsc_config(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.get_vpcsc_config(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_vpcsc_config_async( + transport: str = "grpc_asyncio", request_type=vpcsc_config.GetVPCSCConfigRequest +): + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, ) - # verify fields with default values are dropped + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_python_package._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_vpcsc_config), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + vpcsc_config.VPCSCConfig( + name="name_value", + vpcsc_policy=vpcsc_config.VPCSCConfig.VPCSCPolicy.DENY, + ) + ) + response = await client.get_vpcsc_config(request) - # verify required fields with default values are now present + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = vpcsc_config.GetVPCSCConfigRequest() + assert args[0] == request - jsonified_request["name"] = "name_value" + # Establish that the response is the type that we expect. 
+ assert isinstance(response, vpcsc_config.VPCSCConfig) + assert response.name == "name_value" + assert response.vpcsc_policy == vpcsc_config.VPCSCConfig.VPCSCPolicy.DENY - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_python_package._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" +@pytest.mark.asyncio +async def test_get_vpcsc_config_async_from_dict(): + await test_get_vpcsc_config_async(request_type=dict) + +def test_get_vpcsc_config_field_headers(): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - request = request_type(**request_init) - # Designate an appropriate value for the returned response. - return_value = artifact.PythonPackage() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = vpcsc_config.GetVPCSCConfigRequest() - # Convert return value to protobuf type - return_value = artifact.PythonPackage.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + request.name = "name_value" - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_vpcsc_config), "__call__") as call: + call.return_value = vpcsc_config.VPCSCConfig() + client.get_vpcsc_config(request) - response = client.get_python_package(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] -def test_get_python_package_rest_unset_required_fields(): - transport = transports.ArtifactRegistryRestTransport( - credentials=ga_credentials.AnonymousCredentials +@pytest.mark.asyncio +async def test_get_vpcsc_config_field_headers_async(): + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), ) - unset_fields = transport.get_python_package._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = vpcsc_config.GetVPCSCConfigRequest() + request.name = "name_value" -def test_get_python_package_rest_flattened(): + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_vpcsc_config), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + vpcsc_config.VPCSCConfig() + ) + await client.get_vpcsc_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_vpcsc_config_flattened(): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = artifact.PythonPackage() - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/repositories/sample3/pythonPackages/sample4" - } - - # get truthy value for each flattened field - mock_args = dict( + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_vpcsc_config), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = vpcsc_config.VPCSCConfig() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.get_vpcsc_config( name="name_value", ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = artifact.PythonPackage.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.get_python_package(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/repositories/*/pythonPackages/*}" - % client.transport._host, - args[1], - ) + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val -def test_get_python_package_rest_flattened_error(transport: str = "rest"): +def test_get_vpcsc_config_flattened_error(): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_python_package( - artifact.GetPythonPackageRequest(), + client.get_vpcsc_config( + vpcsc_config.GetVPCSCConfigRequest(), name="name_value", ) -def test_import_apt_artifacts_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ArtifactRegistryClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) +@pytest.mark.asyncio +async def test_get_vpcsc_config_flattened_async(): + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), + ) - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_vpcsc_config), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = vpcsc_config.VPCSCConfig() - # Ensure method has been cached - assert ( - client._transport.import_apt_artifacts in client._transport._wrapped_methods + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + vpcsc_config.VPCSCConfig() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_vpcsc_config( + name="name_value", ) - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_vpcsc_config_flattened_error_async(): + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_vpcsc_config( + vpcsc_config.GetVPCSCConfigRequest(), + name="name_value", ) - client._transport._wrapped_methods[ - client._transport.import_apt_artifacts - ] = mock_rpc - request = {} - client.import_apt_artifacts(request) + +@pytest.mark.parametrize( + "request_type", + [ + gda_vpcsc_config.UpdateVPCSCConfigRequest, + dict, + ], +) +def test_update_vpcsc_config(request_type, transport: str = "grpc"): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_vpcsc_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gda_vpcsc_config.VPCSCConfig( + name="name_value", + vpcsc_policy=gda_vpcsc_config.VPCSCConfig.VPCSCPolicy.DENY, + ) + response = client.update_vpcsc_config(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = gda_vpcsc_config.UpdateVPCSCConfigRequest() + assert args[0] == request - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() + # Establish that the response is the type that we expect. + assert isinstance(response, gda_vpcsc_config.VPCSCConfig) + assert response.name == "name_value" + assert response.vpcsc_policy == gda_vpcsc_config.VPCSCConfig.VPCSCPolicy.DENY - client.import_apt_artifacts(request) - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 +def test_update_vpcsc_config_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = gda_vpcsc_config.UpdateVPCSCConfigRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_vpcsc_config), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.update_vpcsc_config(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == gda_vpcsc_config.UpdateVPCSCConfigRequest() -def test_import_yum_artifacts_rest_use_cached_wrapped_rpc(): +def test_update_vpcsc_config_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) # Should wrap all calls on client creation @@ -16677,7 +17380,7 @@ def test_import_yum_artifacts_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.import_yum_artifacts in client._transport._wrapped_methods + client._transport.update_vpcsc_config in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -16686,33 +17389,31 @@ def test_import_yum_artifacts_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.import_yum_artifacts + client._transport.update_vpcsc_config ] = mock_rpc - request = {} - client.import_yum_artifacts(request) + client.update_vpcsc_config(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.import_yum_artifacts(request) + client.update_vpcsc_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_repositories_rest_use_cached_wrapped_rpc(): +@pytest.mark.asyncio +async def test_update_vpcsc_config_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ArtifactRegistryClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, ) # Should wrap all calls on client creation @@ -16720,255 +17421,305 @@ def test_list_repositories_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_repositories in client._transport._wrapped_methods + assert ( + client._client._transport.update_vpcsc_config + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client._transport._wrapped_methods[ - client._transport.list_repositories + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.update_vpcsc_config ] = mock_rpc request = {} - client.list_repositories(request) + await client.update_vpcsc_config(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_repositories(request) + await client.update_vpcsc_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_repositories_rest_required_fields( - request_type=repository.ListRepositoriesRequest, +@pytest.mark.asyncio +async def test_update_vpcsc_config_async( + transport: str = "grpc_asyncio", + request_type=gda_vpcsc_config.UpdateVPCSCConfigRequest, ): - transport_class = transports.ArtifactRegistryRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, ) - # verify fields with default values are dropped + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_repositories._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_vpcsc_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gda_vpcsc_config.VPCSCConfig( + name="name_value", + vpcsc_policy=gda_vpcsc_config.VPCSCConfig.VPCSCPolicy.DENY, + ) + ) + response = await client.update_vpcsc_config(request) - # verify required fields with default values are now present + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = gda_vpcsc_config.UpdateVPCSCConfigRequest() + assert args[0] == request - jsonified_request["parent"] = "parent_value" + # Establish that the response is the type that we expect. + assert isinstance(response, gda_vpcsc_config.VPCSCConfig) + assert response.name == "name_value" + assert response.vpcsc_policy == gda_vpcsc_config.VPCSCConfig.VPCSCPolicy.DENY - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_repositories._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "page_size", - "page_token", - ) - ) - jsonified_request.update(unset_fields) - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" +@pytest.mark.asyncio +async def test_update_vpcsc_config_async_from_dict(): + await test_update_vpcsc_config_async(request_type=dict) + +def test_update_vpcsc_config_field_headers(): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = repository.ListRepositoriesResponse() - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = repository.ListRepositoriesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.list_repositories(request) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gda_vpcsc_config.UpdateVPCSCConfigRequest() - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params + request.vpcsc_config.name = "name_value" + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_vpcsc_config), "__call__" + ) as call: + call.return_value = gda_vpcsc_config.VPCSCConfig() + client.update_vpcsc_config(request) -def test_list_repositories_rest_unset_required_fields(): - transport = transports.ArtifactRegistryRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request - unset_fields = transport.list_repositories._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "pageSize", - "pageToken", - ) - ) - & set(("parent",)) - ) + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "vpcsc_config.name=name_value", + ) in kw["metadata"] -def test_list_repositories_rest_flattened(): - client = ArtifactRegistryClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", +@pytest.mark.asyncio +async def test_update_vpcsc_config_field_headers_async(): + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = repository.ListRepositoriesResponse() + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gda_vpcsc_config.UpdateVPCSCConfigRequest() - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + request.vpcsc_config.name = "name_value" - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_vpcsc_config), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gda_vpcsc_config.VPCSCConfig() ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = repository.ListRepositoriesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value + await client.update_vpcsc_config(request) - client.list_repositories(**mock_args) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/repositories" - % client.transport._host, - args[1], - ) + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "vpcsc_config.name=name_value", + ) in kw["metadata"] -def test_list_repositories_rest_flattened_error(transport: str = "rest"): +def test_update_vpcsc_config_flattened(): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.list_repositories( - repository.ListRepositoriesRequest(), - parent="parent_value", - ) - - -def test_list_repositories_rest_pager(transport: str = "rest"): - client = ArtifactRegistryClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_vpcsc_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gda_vpcsc_config.VPCSCConfig() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_vpcsc_config( + vpcsc_config=gda_vpcsc_config.VPCSCConfig(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].vpcsc_config + mock_val = gda_vpcsc_config.VPCSCConfig(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_vpcsc_config_flattened_error(): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - repository.ListRepositoriesResponse( - repositories=[ - repository.Repository(), - repository.Repository(), - repository.Repository(), - ], - next_page_token="abc", - ), - repository.ListRepositoriesResponse( - repositories=[], - next_page_token="def", - ), - repository.ListRepositoriesResponse( - repositories=[ - repository.Repository(), - ], - next_page_token="ghi", - ), - repository.ListRepositoriesResponse( - repositories=[ - repository.Repository(), - repository.Repository(), - ], - ), + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_vpcsc_config( + gda_vpcsc_config.UpdateVPCSCConfigRequest(), + vpcsc_config=gda_vpcsc_config.VPCSCConfig(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) - # Two responses for two calls - response = response + response - # Wrap the values into proper Response objs - response = tuple( - repository.ListRepositoriesResponse.to_json(x) for x in response + +@pytest.mark.asyncio +async def test_update_vpcsc_config_flattened_async(): + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_vpcsc_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gda_vpcsc_config.VPCSCConfig() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gda_vpcsc_config.VPCSCConfig() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.update_vpcsc_config( + vpcsc_config=gda_vpcsc_config.VPCSCConfig(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - sample_request = {"parent": "projects/sample1/locations/sample2"} + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].vpcsc_config + mock_val = gda_vpcsc_config.VPCSCConfig(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val - pager = client.list_repositories(request=sample_request) - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, repository.Repository) for i in results) +@pytest.mark.asyncio +async def test_update_vpcsc_config_flattened_error_async(): + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), + ) - pages = list(client.list_repositories(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.update_vpcsc_config( + gda_vpcsc_config.UpdateVPCSCConfigRequest(), + vpcsc_config=gda_vpcsc_config.VPCSCConfig(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) -def test_get_repository_rest_use_cached_wrapped_rpc(): +@pytest.mark.parametrize( + "request_type", + [ + gda_package.UpdatePackageRequest, + dict, + ], +) +def test_update_package(request_type, transport: str = "grpc"): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_package), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gda_package.Package( + name="name_value", + display_name="display_name_value", + ) + response = client.update_package(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = gda_package.UpdatePackageRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, gda_package.Package) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + + +def test_update_package_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = gda_package.UpdatePackageRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_package), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.update_package(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == gda_package.UpdatePackageRequest() + + +def test_update_package_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) # Should wrap all calls on client creation @@ -16976,177 +17727,334 @@ def test_get_repository_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_repository in client._transport._wrapped_methods + assert client._transport.update_package in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_repository] = mock_rpc - + client._transport._wrapped_methods[client._transport.update_package] = mock_rpc request = {} - client.get_repository(request) + client.update_package(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_repository(request) + client.update_package(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_repository_rest_required_fields( - request_type=repository.GetRepositoryRequest, +@pytest.mark.asyncio +async def test_update_package_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", ): - transport_class = transports.ArtifactRegistryRestTransport + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_package + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.update_package + ] = mock_rpc + + request = {} + await client.update_package(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.update_package(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_package_async( + transport: str = "grpc_asyncio", request_type=gda_package.UpdatePackageRequest +): + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, ) - # verify fields with default values are dropped + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_repository._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_package), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gda_package.Package( + name="name_value", + display_name="display_name_value", + ) + ) + response = await client.update_package(request) - # verify required fields with default values are now present + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = gda_package.UpdatePackageRequest() + assert args[0] == request - jsonified_request["name"] = "name_value" + # Establish that the response is the type that we expect. 
+ assert isinstance(response, gda_package.Package) + assert response.name == "name_value" + assert response.display_name == "display_name_value" - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_repository._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" +@pytest.mark.asyncio +async def test_update_package_async_from_dict(): + await test_update_package_async(request_type=dict) + +def test_update_package_field_headers(): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = repository.Repository() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - response_value = Response() - response_value.status_code = 200 + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = gda_package.UpdatePackageRequest() - # Convert return value to protobuf type - return_value = repository.Repository.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + request.package.name = "name_value" - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_package), "__call__") as call: + call.return_value = gda_package.Package() + client.update_package(request) - response = client.get_repository(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "package.name=name_value", + ) in kw["metadata"] -def test_get_repository_rest_unset_required_fields(): - transport = transports.ArtifactRegistryRestTransport( - credentials=ga_credentials.AnonymousCredentials +@pytest.mark.asyncio +async def test_update_package_field_headers_async(): + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), ) - unset_fields = transport.get_repository._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gda_package.UpdatePackageRequest() + request.package.name = "name_value" -def test_get_repository_rest_flattened(): + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.update_package), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gda_package.Package()) + await client.update_package(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "package.name=name_value", + ) in kw["metadata"] + + +def test_update_package_flattened(): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = repository.Repository() + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_package), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gda_package.Package() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_package( + package=gda_package.Package(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/repositories/sample3" - } + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].package + mock_val = gda_package.Package(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val - # get truthy value for each flattened field - mock_args = dict( - name="name_value", + +def test_update_package_flattened_error(): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_package( + gda_package.UpdatePackageRequest(), + package=gda_package.Package(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) - mock_args.update(sample_request) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = repository.Repository.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - client.get_repository(**mock_args) +@pytest.mark.asyncio +async def test_update_package_flattened_async(): + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_package), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gda_package.Package() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gda_package.Package()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.update_package( + package=gda_package.Package(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) # Establish that the underlying call was made with the expected # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/repositories/*}" - % client.transport._host, - args[1], + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].package + mock_val = gda_package.Package(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_package_flattened_error_async(): + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_package( + gda_package.UpdatePackageRequest(), + package=gda_package.Package(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_get_repository_rest_flattened_error(transport: str = "rest"): +@pytest.mark.parametrize( + "request_type", + [ + attachment.ListAttachmentsRequest, + dict, + ], +) +def test_list_attachments(request_type, transport: str = "grpc"): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_repository( - repository.GetRepositoryRequest(), - name="name_value", + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_attachments), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = attachment.ListAttachmentsResponse( + next_page_token="next_page_token_value", ) + response = client.list_attachments(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = attachment.ListAttachmentsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListAttachmentsPager) + assert response.next_page_token == "next_page_token_value" -def test_create_repository_rest_use_cached_wrapped_rpc(): +def test_list_attachments_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = attachment.ListAttachmentsRequest( + parent="parent_value", + filter="filter_value", + page_token="page_token_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_attachments), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.list_attachments(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == attachment.ListAttachmentsRequest( + parent="parent_value", + filter="filter_value", + page_token="page_token_value", + ) + + +def test_list_attachments_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) # Should wrap all calls on client creation @@ -17154,7 +18062,7 @@ def test_create_repository_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.create_repository in client._transport._wrapped_methods + assert client._transport.list_attachments in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -17162,314 +18070,520 @@ def test_create_repository_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.create_repository + client._transport.list_attachments ] = mock_rpc - request = {} - client.create_repository(request) + client.list_attachments(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_repository(request) + client.list_attachments(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_repository_rest_required_fields( - request_type=gda_repository.CreateRepositoryRequest, +@pytest.mark.asyncio +async def test_list_attachments_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", ): - transport_class = transports.ArtifactRegistryRestTransport - - request_init = {} - request_init["parent"] = "" - request_init["repository_id"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) - # verify fields with default values are dropped - assert "repositoryId" not in jsonified_request + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).create_repository._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + # Ensure method has been cached + assert ( + client._client._transport.list_attachments + in client._client._transport._wrapped_methods + ) - # verify required fields with default values are now present - assert "repositoryId" in jsonified_request - assert jsonified_request["repositoryId"] == request_init["repository_id"] + # Replace 
cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.list_attachments + ] = mock_rpc - jsonified_request["parent"] = "parent_value" - jsonified_request["repositoryId"] = "repository_id_value" + request = {} + await client.list_attachments(request) - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).create_repository._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("repository_id",)) - jsonified_request.update(unset_fields) + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - assert "repositoryId" in jsonified_request - assert jsonified_request["repositoryId"] == "repository_id_value" + await client.list_attachments(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_attachments_async( + transport: str = "grpc_asyncio", request_type=attachment.ListAttachmentsRequest +): + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_attachments), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + attachment.ListAttachmentsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_attachments(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = attachment.ListAttachmentsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListAttachmentsAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_attachments_async_from_dict(): + await test_list_attachments_async(request_type=dict) + +def test_list_attachments_field_headers(): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - request = request_type(**request_init) - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "post", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = attachment.ListAttachmentsRequest() - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) + request.parent = "parent_value" - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_attachments), "__call__") as call: + call.return_value = attachment.ListAttachmentsResponse() + client.list_attachments(request) - response = client.create_repository(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request - expected_params = [ - ( - "repositoryId", - "", - ), - ("$alt", "json;enum-encoding=int"), - ] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] -def test_create_repository_rest_unset_required_fields(): - transport = transports.ArtifactRegistryRestTransport( - credentials=ga_credentials.AnonymousCredentials +@pytest.mark.asyncio +async def test_list_attachments_field_headers_async(): + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), ) - unset_fields = transport.create_repository._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(("repositoryId",)) - & set( - ( - "parent", - "repositoryId", - "repository", - ) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = attachment.ListAttachmentsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_attachments), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + attachment.ListAttachmentsResponse() ) - ) + await client.list_attachments(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request -def test_create_repository_rest_flattened(): + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_attachments_flattened(): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} - - # get truthy value for each flattened field - mock_args = dict( + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_attachments), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = attachment.ListAttachmentsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.list_attachments( parent="parent_value", - repository=gda_repository.Repository( - maven_config=gda_repository.Repository.MavenRepositoryConfig( - allow_snapshot_overwrites=True - ) - ), - repository_id="repository_id_value", ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.create_repository(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/repositories" - % client.transport._host, - args[1], - ) + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val -def test_create_repository_rest_flattened_error(transport: str = "rest"): +def test_list_attachments_flattened_error(): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.create_repository( - gda_repository.CreateRepositoryRequest(), + client.list_attachments( + attachment.ListAttachmentsRequest(), parent="parent_value", - repository=gda_repository.Repository( - maven_config=gda_repository.Repository.MavenRepositoryConfig( - allow_snapshot_overwrites=True - ) - ), - repository_id="repository_id_value", ) -def test_update_repository_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ArtifactRegistryClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", +@pytest.mark.asyncio +async def test_list_attachments_flattened_async(): + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_attachments), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = attachment.ListAttachmentsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + attachment.ListAttachmentsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_attachments( + parent="parent_value", ) - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val - # Ensure method has been cached - assert client._transport.update_repository in client._transport._wrapped_methods - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. +@pytest.mark.asyncio +async def test_list_attachments_flattened_error_async(): + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_attachments( + attachment.ListAttachmentsRequest(), + parent="parent_value", ) - client._transport._wrapped_methods[ - client._transport.update_repository - ] = mock_rpc - request = {} - client.update_repository(request) - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 +def test_list_attachments_pager(transport_name: str = "grpc"): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) - client.update_repository(request) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_attachments), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + attachment.ListAttachmentsResponse( + attachments=[ + attachment.Attachment(), + attachment.Attachment(), + attachment.Attachment(), + ], + next_page_token="abc", + ), + attachment.ListAttachmentsResponse( + attachments=[], + next_page_token="def", + ), + attachment.ListAttachmentsResponse( + attachments=[ + attachment.Attachment(), + ], + next_page_token="ghi", + ), + attachment.ListAttachmentsResponse( + attachments=[ + attachment.Attachment(), + attachment.Attachment(), + ], + ), + RuntimeError, + ) - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_attachments(request={}, retry=retry, timeout=timeout) + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout -def test_update_repository_rest_flattened(): + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, attachment.Attachment) for i in results) + + +def test_list_attachments_pages(transport_name: str = "grpc"): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport=transport_name, ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = gda_repository.Repository() + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_attachments), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + attachment.ListAttachmentsResponse( + attachments=[ + attachment.Attachment(), + attachment.Attachment(), + attachment.Attachment(), + ], + next_page_token="abc", + ), + attachment.ListAttachmentsResponse( + attachments=[], + next_page_token="def", + ), + attachment.ListAttachmentsResponse( + attachments=[ + attachment.Attachment(), + ], + next_page_token="ghi", + ), + attachment.ListAttachmentsResponse( + attachments=[ + attachment.Attachment(), + attachment.Attachment(), + ], + ), + RuntimeError, + ) + pages = list(client.list_attachments(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token - # get arguments that satisfy an http rule for this method - sample_request = { - "repository": { - "name": "projects/sample1/locations/sample2/repositories/sample3" - } - } - # get truthy value for each flattened field - mock_args = dict( - repository=gda_repository.Repository( - maven_config=gda_repository.Repository.MavenRepositoryConfig( - allow_snapshot_overwrites=True - ) +@pytest.mark.asyncio +async def test_list_attachments_async_pager(): + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_attachments), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + attachment.ListAttachmentsResponse( + attachments=[ + attachment.Attachment(), + attachment.Attachment(), + attachment.Attachment(), + ], + next_page_token="abc", ), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + attachment.ListAttachmentsResponse( + attachments=[], + next_page_token="def", + ), + attachment.ListAttachmentsResponse( + attachments=[ + attachment.Attachment(), + ], + next_page_token="ghi", + ), + attachment.ListAttachmentsResponse( + attachments=[ + attachment.Attachment(), + attachment.Attachment(), + ], + ), + RuntimeError, ) - mock_args.update(sample_request) + async_pager = await client.list_attachments( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = gda_repository.Repository.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value + assert len(responses) == 6 + assert all(isinstance(i, attachment.Attachment) for i in responses) - client.update_repository(**mock_args) - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{repository.name=projects/*/locations/*/repositories/*}" - % client.transport._host, - args[1], +@pytest.mark.asyncio +async def test_list_attachments_async_pages(): + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_attachments), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + attachment.ListAttachmentsResponse( + attachments=[ + attachment.Attachment(), + attachment.Attachment(), + attachment.Attachment(), + ], + next_page_token="abc", + ), + attachment.ListAttachmentsResponse( + attachments=[], + next_page_token="def", + ), + attachment.ListAttachmentsResponse( + attachments=[ + attachment.Attachment(), + ], + next_page_token="ghi", + ), + attachment.ListAttachmentsResponse( + attachments=[ + attachment.Attachment(), + attachment.Attachment(), + ], + ), + RuntimeError, ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_attachments(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token -def test_update_repository_rest_flattened_error(transport: str = "rest"): +@pytest.mark.parametrize( + "request_type", + [ + attachment.GetAttachmentRequest, + dict, + ], +) +def test_get_attachment(request_type, transport: str = "grpc"): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.update_repository( - gda_repository.UpdateRepositoryRequest(), - repository=gda_repository.Repository( - maven_config=gda_repository.Repository.MavenRepositoryConfig( - allow_snapshot_overwrites=True - ) - ), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_attachment), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = attachment.Attachment( + name="name_value", + target="target_value", + type_="type__value", + attachment_namespace="attachment_namespace_value", + files=["files_value"], + oci_version_name="oci_version_name_value", ) + response = client.get_attachment(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = attachment.GetAttachmentRequest() + assert args[0] == request -def test_delete_repository_rest_use_cached_wrapped_rpc(): + # Establish that the response is the type that we expect. + assert isinstance(response, attachment.Attachment) + assert response.name == "name_value" + assert response.target == "target_value" + assert response.type_ == "type__value" + assert response.attachment_namespace == "attachment_namespace_value" + assert response.files == ["files_value"] + assert response.oci_version_name == "oci_version_name_value" + + +def test_get_attachment_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = attachment.GetAttachmentRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_attachment), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_attachment(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == attachment.GetAttachmentRequest( + name="name_value", + ) + + +def test_get_attachment_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) # Should wrap all calls on client creation @@ -17477,178 +18591,335 @@ def test_delete_repository_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.delete_repository in client._transport._wrapped_methods + assert client._transport.get_attachment in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.delete_repository - ] = mock_rpc - + client._transport._wrapped_methods[client._transport.get_attachment] = mock_rpc request = {} - client.delete_repository(request) + client.get_attachment(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.delete_repository(request) + client.get_attachment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_repository_rest_required_fields( - request_type=repository.DeleteRepositoryRequest, +@pytest.mark.asyncio +async def test_get_attachment_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", ): - transport_class = transports.ArtifactRegistryRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) - # verify fields with default values are dropped + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).delete_repository._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + # Ensure method has been cached + assert ( + client._client._transport.get_attachment + in 
client._client._transport._wrapped_methods + ) - # verify required fields with default values are now present + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_attachment + ] = mock_rpc - jsonified_request["name"] = "name_value" + request = {} + await client.get_attachment(request) - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).delete_repository._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + await client.get_attachment(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_attachment_async( + transport: str = "grpc_asyncio", request_type=attachment.GetAttachmentRequest +): + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_attachment), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + attachment.Attachment( + name="name_value", + target="target_value", + type_="type__value", + attachment_namespace="attachment_namespace_value", + files=["files_value"], + oci_version_name="oci_version_name_value", + ) + ) + response = await client.get_attachment(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = attachment.GetAttachmentRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, attachment.Attachment) + assert response.name == "name_value" + assert response.target == "target_value" + assert response.type_ == "type__value" + assert response.attachment_namespace == "attachment_namespace_value" + assert response.files == ["files_value"] + assert response.oci_version_name == "oci_version_name_value" + + +@pytest.mark.asyncio +async def test_get_attachment_async_from_dict(): + await test_get_attachment_async(request_type=dict) + + +def test_get_attachment_field_headers(): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - request = request_type(**request_init) - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "delete", - "query_params": pb_request, - } - transcode.return_value = transcode_result + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = attachment.GetAttachmentRequest() - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) + request.name = "name_value" - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_attachment), "__call__") as call: + call.return_value = attachment.Attachment() + client.get_attachment(request) - response = client.delete_repository(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] -def test_delete_repository_rest_unset_required_fields(): - transport = transports.ArtifactRegistryRestTransport( - credentials=ga_credentials.AnonymousCredentials +@pytest.mark.asyncio +async def test_get_attachment_field_headers_async(): + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), ) - unset_fields = transport.delete_repository._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = attachment.GetAttachmentRequest() + request.name = "name_value" -def test_delete_repository_rest_flattened(): + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_attachment), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + attachment.Attachment() + ) + await client.get_attachment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_attachment_flattened(): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_attachment), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = attachment.Attachment() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_attachment( + name="name_value", + ) - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/repositories/sample3" - } + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val - # get truthy value for each flattened field - mock_args = dict( + +def test_get_attachment_flattened_error(): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_attachment( + attachment.GetAttachmentRequest(), name="name_value", ) - mock_args.update(sample_request) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - client.delete_repository(**mock_args) +@pytest.mark.asyncio +async def test_get_attachment_flattened_async(): + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_attachment), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = attachment.Attachment() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + attachment.Attachment() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_attachment( + name="name_value", + ) # Establish that the underlying call was made with the expected # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/repositories/*}" - % client.transport._host, - args[1], - ) + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val -def test_delete_repository_rest_flattened_error(transport: str = "rest"): - client = ArtifactRegistryClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, +@pytest.mark.asyncio +async def test_get_attachment_flattened_error_async(): + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete_repository( - repository.DeleteRepositoryRequest(), + await client.get_attachment( + attachment.GetAttachmentRequest(), name="name_value", ) -def test_list_packages_rest_use_cached_wrapped_rpc(): +@pytest.mark.parametrize( + "request_type", + [ + gda_attachment.CreateAttachmentRequest, + dict, + ], +) +def test_create_attachment(request_type, transport: str = "grpc"): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_attachment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_attachment(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = gda_attachment.CreateAttachmentRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_attachment_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = gda_attachment.CreateAttachmentRequest( + parent="parent_value", + attachment_id="attachment_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_attachment), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.create_attachment(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == gda_attachment.CreateAttachmentRequest( + parent="parent_value", + attachment_id="attachment_id_value", + ) + + +def test_create_attachment_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) # Should wrap all calls on client creation @@ -17656,253 +18927,362 @@ def test_list_packages_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_packages in client._transport._wrapped_methods + assert client._transport.create_attachment in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.list_packages] = mock_rpc - + client._transport._wrapped_methods[ + client._transport.create_attachment + ] = mock_rpc request = {} - client.list_packages(request) + client.create_attachment(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_packages(request) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_attachment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_packages_rest_required_fields(request_type=package.ListPackagesRequest): - transport_class = transports.ArtifactRegistryRestTransport +@pytest.mark.asyncio +async def test_create_attachment_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - # verify fields with default values are dropped + # Ensure method has been cached + assert ( + client._client._transport.create_attachment + in client._client._transport._wrapped_methods + ) - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_packages._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.create_attachment + ] = mock_rpc - # verify required fields with default values are now present + request = {} + await client.create_attachment(request) - jsonified_request["parent"] = "parent_value" + # Establish that the underlying gRPC stub 
method was called. + assert mock_rpc.call_count == 1 - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_packages._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "page_size", - "page_token", - ) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_attachment(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_attachment_async( + transport: str = "grpc_asyncio", request_type=gda_attachment.CreateAttachmentRequest +): + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, ) - jsonified_request.update(unset_fields) - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_attachment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_attachment(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = gda_attachment.CreateAttachmentRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_attachment_async_from_dict(): + await test_create_attachment_async(request_type=dict) + +def test_create_attachment_field_headers(): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = package.ListPackagesResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - response_value = Response() - response_value.status_code = 200 + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gda_attachment.CreateAttachmentRequest() - # Convert return value to protobuf type - return_value = package.ListPackagesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + request.parent = "parent_value" - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_attachment), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_attachment(request) - response = client.list_packages(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] -def test_list_packages_rest_unset_required_fields(): - transport = transports.ArtifactRegistryRestTransport( - credentials=ga_credentials.AnonymousCredentials +@pytest.mark.asyncio +async def test_create_attachment_field_headers_async(): + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), ) - unset_fields = transport.list_packages._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "pageSize", - "pageToken", - ) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gda_attachment.CreateAttachmentRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_attachment), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") ) - & set(("parent",)) - ) + await client.create_attachment(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request -def test_list_packages_rest_flattened(): + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_attachment_flattened(): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = package.ListPackagesResponse() - - # get arguments that satisfy an http rule for this method - sample_request = { - "parent": "projects/sample1/locations/sample2/repositories/sample3" - } - - # get truthy value for each flattened field - mock_args = dict( + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_attachment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_attachment( parent="parent_value", + attachment=gda_attachment.Attachment(name="name_value"), + attachment_id="attachment_id_value", ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = package.ListPackagesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.list_packages(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*/repositories/*}/packages" - % client.transport._host, - args[1], - ) + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].attachment + mock_val = gda_attachment.Attachment(name="name_value") + assert arg == mock_val + arg = args[0].attachment_id + mock_val = "attachment_id_value" + assert arg == mock_val -def test_list_packages_rest_flattened_error(transport: str = "rest"): +def test_create_attachment_flattened_error(): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_packages( - package.ListPackagesRequest(), + client.create_attachment( + gda_attachment.CreateAttachmentRequest(), parent="parent_value", + attachment=gda_attachment.Attachment(name="name_value"), + attachment_id="attachment_id_value", ) -def test_list_packages_rest_pager(transport: str = "rest"): - client = ArtifactRegistryClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, +@pytest.mark.asyncio +async def test_create_attachment_flattened_async(): + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - package.ListPackagesResponse( - packages=[ - package.Package(), - package.Package(), - package.Package(), - ], - next_page_token="abc", - ), - package.ListPackagesResponse( - packages=[], - next_page_token="def", - ), - package.ListPackagesResponse( - packages=[ - package.Package(), - ], - next_page_token="ghi", - ), - package.ListPackagesResponse( - packages=[ - package.Package(), - package.Package(), - ], - ), + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_attachment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_attachment( + parent="parent_value", + attachment=gda_attachment.Attachment(name="name_value"), + attachment_id="attachment_id_value", ) - # Two responses for two calls - response = response + response - # Wrap the values into proper Response objs - response = tuple(package.ListPackagesResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].attachment + mock_val = gda_attachment.Attachment(name="name_value") + assert arg == mock_val + arg = args[0].attachment_id + mock_val = "attachment_id_value" + assert arg == mock_val - sample_request = { - "parent": "projects/sample1/locations/sample2/repositories/sample3" - } - pager = client.list_packages(request=sample_request) +@pytest.mark.asyncio +async def test_create_attachment_flattened_error_async(): + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), + ) - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, package.Package) for i in results) + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_attachment( + gda_attachment.CreateAttachmentRequest(), + parent="parent_value", + attachment=gda_attachment.Attachment(name="name_value"), + attachment_id="attachment_id_value", + ) - pages = list(client.list_packages(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token +@pytest.mark.parametrize( + "request_type", + [ + attachment.DeleteAttachmentRequest, + dict, + ], +) +def test_delete_attachment(request_type, transport: str = "grpc"): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() -def test_get_package_rest_use_cached_wrapped_rpc(): + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_attachment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_attachment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = attachment.DeleteAttachmentRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_attachment_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = attachment.DeleteAttachmentRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_attachment), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.delete_attachment(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == attachment.DeleteAttachmentRequest( + name="name_value", + ) + + +def test_delete_attachment_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) # Should wrap all calls on client creation @@ -17910,169 +19290,271 @@ def test_get_package_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_package in client._transport._wrapped_methods + assert client._transport.delete_attachment in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_package] = mock_rpc - + client._transport._wrapped_methods[ + client._transport.delete_attachment + ] = mock_rpc request = {} - client.get_package(request) + client.delete_attachment(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_package(request) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_attachment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_package_rest_required_fields(request_type=package.GetPackageRequest): - transport_class = transports.ArtifactRegistryRestTransport +@pytest.mark.asyncio +async def test_delete_attachment_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_attachment + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_attachment + ] = mock_rpc + + request = {} + await client.delete_attachment(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_attachment(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_attachment_async( + transport: str = "grpc_asyncio", request_type=attachment.DeleteAttachmentRequest +): + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, ) - # verify fields with default values are dropped + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_package._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_attachment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_attachment(request) - # verify required fields with default values are now present + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = attachment.DeleteAttachmentRequest() + assert args[0] == request - jsonified_request["name"] = "name_value" + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_package._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" +@pytest.mark.asyncio +async def test_delete_attachment_async_from_dict(): + await test_delete_attachment_async(request_type=dict) + +def test_delete_attachment_field_headers(): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = package.Package() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - response_value = Response() - response_value.status_code = 200 + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = attachment.DeleteAttachmentRequest() - # Convert return value to protobuf type - return_value = package.Package.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + request.name = "name_value" - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_attachment), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_attachment(request) - response = client.get_package(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] -def test_get_package_rest_unset_required_fields(): - transport = transports.ArtifactRegistryRestTransport( - credentials=ga_credentials.AnonymousCredentials +@pytest.mark.asyncio +async def test_delete_attachment_field_headers_async(): + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), ) - unset_fields = transport.get_package._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = attachment.DeleteAttachmentRequest() + request.name = "name_value" -def test_get_package_rest_flattened(): + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_attachment), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.delete_attachment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_attachment_flattened(): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = package.Package() + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_attachment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_attachment( + name="name_value", + ) - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/repositories/sample3/packages/sample4" - } + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val - # get truthy value for each flattened field - mock_args = dict( + +def test_delete_attachment_flattened_error(): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_attachment( + attachment.DeleteAttachmentRequest(), name="name_value", ) - mock_args.update(sample_request) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = package.Package.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - client.get_package(**mock_args) +@pytest.mark.asyncio +async def test_delete_attachment_flattened_async(): + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_attachment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_attachment( + name="name_value", + ) # Establish that the underlying call was made with the expected # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/repositories/*/packages/*}" - % client.transport._host, - args[1], - ) + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val -def test_get_package_rest_flattened_error(transport: str = "rest"): - client = ArtifactRegistryClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, +@pytest.mark.asyncio +async def test_delete_attachment_flattened_error_async(): + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_package( - package.GetPackageRequest(), + await client.delete_attachment( + attachment.DeleteAttachmentRequest(), name="name_value", ) -def test_delete_package_rest_use_cached_wrapped_rpc(): +def test_list_docker_images_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -18086,37 +19568,39 @@ def test_delete_package_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.delete_package in client._transport._wrapped_methods + assert ( + client._transport.list_docker_images in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.delete_package] = mock_rpc + client._transport._wrapped_methods[ + client._transport.list_docker_images + ] = mock_rpc request = {} - client.delete_package(request) + client.list_docker_images(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.delete_package(request) + client.list_docker_images(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_package_rest_required_fields(request_type=package.DeletePackageRequest): +def test_list_docker_images_rest_required_fields( + request_type=artifact.ListDockerImagesRequest, +): transport_class = transports.ArtifactRegistryRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -18127,21 +19611,29 @@ def test_delete_package_rest_required_fields(request_type=package.DeletePackageR unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_package._get_unset_required_fields(jsonified_request) + ).list_docker_images._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_package._get_unset_required_fields(jsonified_request) + ).list_docker_images._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "order_by", + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), @@ -18150,7 +19642,7 @@ def test_delete_package_rest_required_fields(request_type=package.DeletePackageR request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = artifact.ListDockerImagesResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -18162,35 +19654,47 @@ def test_delete_package_rest_required_fields(request_type=package.DeletePackageR pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "get", "query_params": pb_request, } transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = artifact.ListDockerImagesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_package(request) + response = client.list_docker_images(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_package_rest_unset_required_fields(): +def test_list_docker_images_rest_unset_required_fields(): transport = 
transports.ArtifactRegistryRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_package._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.list_docker_images._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) -def test_delete_package_rest_flattened(): +def test_list_docker_images_rest_flattened(): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -18199,40 +19703,42 @@ def test_delete_package_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = artifact.ListDockerImagesResponse() # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/repositories/sample3/packages/sample4" + "parent": "projects/sample1/locations/sample2/repositories/sample3" } # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = artifact.ListDockerImagesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.delete_package(**mock_args) + client.list_docker_images(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/repositories/*/packages/*}" + "%s/v1/{parent=projects/*/locations/*/repositories/*}/dockerImages" % client.transport._host, args[1], ) -def test_delete_package_rest_flattened_error(transport: str = "rest"): +def test_list_docker_images_rest_flattened_error(transport: str = "rest"): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -18241,111 +19747,16 @@ def test_delete_package_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete_package( - package.DeletePackageRequest(), - name="name_value", - ) - - -def test_list_versions_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ArtifactRegistryClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_versions in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. + client.list_docker_images( + artifact.ListDockerImagesRequest(), + parent="parent_value", ) - client._transport._wrapped_methods[client._transport.list_versions] = mock_rpc - - request = {} - client.list_versions(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.list_versions(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 -def test_list_versions_rest_flattened(): +def test_list_docker_images_rest_pager(transport: str = "rest"): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = version.ListVersionsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = { - "parent": "projects/sample1/locations/sample2/repositories/sample3/packages/sample4" - } - - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = version.ListVersionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.list_versions(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*/repositories/*/packages/*}/versions" - % client.transport._host, - args[1], - ) - - -def test_list_versions_rest_flattened_error(transport: str = "rest"): - client = ArtifactRegistryClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_versions( - version.ListVersionsRequest(), - parent="parent_value", - ) - - -def test_list_versions_rest_pager(transport: str = "rest"): - client = ArtifactRegistryClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport=transport, ) # Mock the http request call within the method and fake a response. @@ -18354,28 +19765,28 @@ def test_list_versions_rest_pager(transport: str = "rest"): # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( - version.ListVersionsResponse( - versions=[ - version.Version(), - version.Version(), - version.Version(), + artifact.ListDockerImagesResponse( + docker_images=[ + artifact.DockerImage(), + artifact.DockerImage(), + artifact.DockerImage(), ], next_page_token="abc", ), - version.ListVersionsResponse( - versions=[], + artifact.ListDockerImagesResponse( + docker_images=[], next_page_token="def", ), - version.ListVersionsResponse( - versions=[ - version.Version(), + artifact.ListDockerImagesResponse( + docker_images=[ + artifact.DockerImage(), ], next_page_token="ghi", ), - version.ListVersionsResponse( - versions=[ - version.Version(), - version.Version(), + artifact.ListDockerImagesResponse( + docker_images=[ + artifact.DockerImage(), + artifact.DockerImage(), ], ), ) @@ -18383,7 +19794,7 @@ def test_list_versions_rest_pager(transport: str = "rest"): response = response + response # 
Wrap the values into proper Response objs - response = tuple(version.ListVersionsResponse.to_json(x) for x in response) + response = tuple(artifact.ListDockerImagesResponse.to_json(x) for x in response) return_values = tuple(Response() for i in response) for return_val, response_val in zip(return_values, response): return_val._content = response_val.encode("UTF-8") @@ -18391,213 +19802,21 @@ def test_list_versions_rest_pager(transport: str = "rest"): req.side_effect = return_values sample_request = { - "parent": "projects/sample1/locations/sample2/repositories/sample3/packages/sample4" + "parent": "projects/sample1/locations/sample2/repositories/sample3" } - pager = client.list_versions(request=sample_request) + pager = client.list_docker_images(request=sample_request) results = list(pager) assert len(results) == 6 - assert all(isinstance(i, version.Version) for i in results) + assert all(isinstance(i, artifact.DockerImage) for i in results) - pages = list(client.list_versions(request=sample_request).pages) + pages = list(client.list_docker_images(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token -def test_get_version_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ArtifactRegistryClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_version in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client._transport._wrapped_methods[client._transport.get_version] = mock_rpc - - request = {} - client.get_version(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_version(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_version_rest_flattened(): - client = ArtifactRegistryClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = version.Version() - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/repositories/sample3/packages/sample4/versions/sample5" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = version.Version.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.get_version(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/repositories/*/packages/*/versions/*}" - % client.transport._host, - args[1], - ) - - -def test_get_version_rest_flattened_error(transport: str = "rest"): - client = ArtifactRegistryClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_version( - version.GetVersionRequest(), - name="name_value", - ) - - -def test_delete_version_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ArtifactRegistryClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_version in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.delete_version] = mock_rpc - - request = {} - client.delete_version(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.delete_version(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_delete_version_rest_flattened(): - client = ArtifactRegistryClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/repositories/sample3/packages/sample4/versions/sample5" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.delete_version(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/repositories/*/packages/*/versions/*}" - % client.transport._host, - args[1], - ) - - -def test_delete_version_rest_flattened_error(transport: str = "rest"): - client = ArtifactRegistryClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_version( - version.DeleteVersionRequest(), - name="name_value", - ) - - -def test_batch_delete_versions_rest_use_cached_wrapped_rpc(): +def test_get_docker_image_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -18611,10 +19830,7 @@ def test_batch_delete_versions_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.batch_delete_versions - in client._transport._wrapped_methods - ) + assert client._transport.get_docker_image in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -18622,33 +19838,29 @@ def test_batch_delete_versions_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.batch_delete_versions + client._transport.get_docker_image ] = mock_rpc request = {} - client.batch_delete_versions(request) + client.get_docker_image(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.batch_delete_versions(request) + client.get_docker_image(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_batch_delete_versions_rest_required_fields( - request_type=version.BatchDeleteVersionsRequest, +def test_get_docker_image_rest_required_fields( + request_type=artifact.GetDockerImageRequest, ): transport_class = transports.ArtifactRegistryRestTransport request_init = {} - request_init["names"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -18659,21 +19871,21 @@ def test_batch_delete_versions_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).batch_delete_versions._get_unset_required_fields(jsonified_request) + ).get_docker_image._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["names"] = "names_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).batch_delete_versions._get_unset_required_fields(jsonified_request) + ).get_docker_image._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "names" in jsonified_request - assert jsonified_request["names"] == "names_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), @@ -18682,7 +19894,7 @@ def test_batch_delete_versions_rest_required_fields( request = 
request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = artifact.DockerImage() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -18694,36 +19906,38 @@ def test_batch_delete_versions_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = artifact.DockerImage.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.batch_delete_versions(request) + response = client.get_docker_image(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_batch_delete_versions_rest_unset_required_fields(): +def test_get_docker_image_rest_unset_required_fields(): transport = transports.ArtifactRegistryRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.batch_delete_versions._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("names",))) + unset_fields = transport.get_docker_image._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) -def test_batch_delete_versions_rest_flattened(): +def test_get_docker_image_rest_flattened(): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -18732,41 +19946,42 @@ def 
test_batch_delete_versions_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = artifact.DockerImage() # get arguments that satisfy an http rule for this method sample_request = { - "parent": "projects/sample1/locations/sample2/repositories/sample3/packages/sample4" + "name": "projects/sample1/locations/sample2/repositories/sample3/dockerImages/sample4" } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", - names=["names_value"], + name="name_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = artifact.DockerImage.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.batch_delete_versions(**mock_args) + client.get_docker_image(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*/repositories/*/packages/*}/versions:batchDelete" + "%s/v1/{name=projects/*/locations/*/repositories/*/dockerImages/*}" % client.transport._host, args[1], ) -def test_batch_delete_versions_rest_flattened_error(transport: str = "rest"): +def test_get_docker_image_rest_flattened_error(transport: str = "rest"): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -18775,14 +19990,13 @@ def test_batch_delete_versions_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.batch_delete_versions( - version.BatchDeleteVersionsRequest(), - parent="parent_value", - names=["names_value"], + client.get_docker_image( + artifact.GetDockerImageRequest(), + name="name_value", ) -def test_list_files_rest_use_cached_wrapped_rpc(): +def test_list_maven_artifacts_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -18796,29 +20010,35 @@ def test_list_files_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_files in client._transport._wrapped_methods + assert ( + client._transport.list_maven_artifacts in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.list_files] = mock_rpc + client._transport._wrapped_methods[ + client._transport.list_maven_artifacts + ] = mock_rpc request = {} - client.list_files(request) + client.list_maven_artifacts(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_files(request) + client.list_maven_artifacts(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_files_rest_required_fields(request_type=file.ListFilesRequest): +def test_list_maven_artifacts_rest_required_fields( + request_type=artifact.ListMavenArtifactsRequest, +): transport_class = transports.ArtifactRegistryRestTransport request_init = {} @@ -18833,7 +20053,7 @@ def test_list_files_rest_required_fields(request_type=file.ListFilesRequest): unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_files._get_unset_required_fields(jsonified_request) + ).list_maven_artifacts._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -18842,12 +20062,10 @@ def test_list_files_rest_required_fields(request_type=file.ListFilesRequest): unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_files._get_unset_required_fields(jsonified_request) + ).list_maven_artifacts._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set( ( - "filter", - "order_by", "page_size", "page_token", ) @@ -18865,7 +20083,7 @@ def test_list_files_rest_required_fields(request_type=file.ListFilesRequest): request = request_type(**request_init) # Designate an appropriate value for the returned response. 
- return_value = file.ListFilesResponse() + return_value = artifact.ListMavenArtifactsResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -18886,30 +20104,28 @@ def test_list_files_rest_required_fields(request_type=file.ListFilesRequest): response_value.status_code = 200 # Convert return value to protobuf type - return_value = file.ListFilesResponse.pb(return_value) + return_value = artifact.ListMavenArtifactsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_files(request) + response = client.list_maven_artifacts(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_files_rest_unset_required_fields(): +def test_list_maven_artifacts_rest_unset_required_fields(): transport = transports.ArtifactRegistryRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_files._get_unset_required_fields({}) + unset_fields = transport.list_maven_artifacts._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( - "filter", - "orderBy", "pageSize", "pageToken", ) @@ -18918,7 +20134,7 @@ def test_list_files_rest_unset_required_fields(): ) -def test_list_files_rest_flattened(): +def test_list_maven_artifacts_rest_flattened(): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -18927,7 +20143,7 @@ def test_list_files_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = file.ListFilesResponse() + return_value = artifact.ListMavenArtifactsResponse() # get arguments that satisfy an http rule for this method sample_request = { @@ -18944,25 +20160,25 @@ def test_list_files_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = file.ListFilesResponse.pb(return_value) + return_value = artifact.ListMavenArtifactsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_files(**mock_args) + client.list_maven_artifacts(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*/repositories/*}/files" + "%s/v1/{parent=projects/*/locations/*/repositories/*}/mavenArtifacts" % client.transport._host, args[1], ) -def test_list_files_rest_flattened_error(transport: str = "rest"): +def test_list_maven_artifacts_rest_flattened_error(transport: str = "rest"): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -18971,13 +20187,13 @@ def test_list_files_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.list_files( - file.ListFilesRequest(), + client.list_maven_artifacts( + artifact.ListMavenArtifactsRequest(), parent="parent_value", ) -def test_list_files_rest_pager(transport: str = "rest"): +def test_list_maven_artifacts_rest_pager(transport: str = "rest"): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -18989,28 +20205,28 @@ def test_list_files_rest_pager(transport: str = "rest"): # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( - file.ListFilesResponse( - files=[ - file.File(), - file.File(), - file.File(), + artifact.ListMavenArtifactsResponse( + maven_artifacts=[ + artifact.MavenArtifact(), + artifact.MavenArtifact(), + artifact.MavenArtifact(), ], next_page_token="abc", ), - file.ListFilesResponse( - files=[], + artifact.ListMavenArtifactsResponse( + maven_artifacts=[], next_page_token="def", ), - file.ListFilesResponse( - files=[ - file.File(), + artifact.ListMavenArtifactsResponse( + maven_artifacts=[ + artifact.MavenArtifact(), ], next_page_token="ghi", ), - file.ListFilesResponse( - files=[ - file.File(), - file.File(), + artifact.ListMavenArtifactsResponse( + maven_artifacts=[ + artifact.MavenArtifact(), + artifact.MavenArtifact(), ], ), ) @@ -19018,7 +20234,9 @@ def test_list_files_rest_pager(transport: str = "rest"): response = response + response # Wrap the values into proper Response objs - response = tuple(file.ListFilesResponse.to_json(x) for x in response) + response = tuple( + artifact.ListMavenArtifactsResponse.to_json(x) for x in response + ) return_values = tuple(Response() for i in response) for return_val, response_val in zip(return_values, response): return_val._content = response_val.encode("UTF-8") @@ -19029,18 +20247,18 @@ def test_list_files_rest_pager(transport: str = "rest"): "parent": "projects/sample1/locations/sample2/repositories/sample3" } - pager = 
client.list_files(request=sample_request) + pager = client.list_maven_artifacts(request=sample_request) results = list(pager) assert len(results) == 6 - assert all(isinstance(i, file.File) for i in results) + assert all(isinstance(i, artifact.MavenArtifact) for i in results) - pages = list(client.list_files(request=sample_request).pages) + pages = list(client.list_maven_artifacts(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token -def test_get_file_rest_use_cached_wrapped_rpc(): +def test_get_maven_artifact_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -19054,29 +20272,35 @@ def test_get_file_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_file in client._transport._wrapped_methods + assert ( + client._transport.get_maven_artifact in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_file] = mock_rpc + client._transport._wrapped_methods[ + client._transport.get_maven_artifact + ] = mock_rpc request = {} - client.get_file(request) + client.get_maven_artifact(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_file(request) + client.get_maven_artifact(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_file_rest_required_fields(request_type=file.GetFileRequest): +def test_get_maven_artifact_rest_required_fields( + request_type=artifact.GetMavenArtifactRequest, +): transport_class = transports.ArtifactRegistryRestTransport request_init = {} @@ -19091,7 +20315,7 @@ def test_get_file_rest_required_fields(request_type=file.GetFileRequest): unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_file._get_unset_required_fields(jsonified_request) + ).get_maven_artifact._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -19100,7 +20324,7 @@ def test_get_file_rest_required_fields(request_type=file.GetFileRequest): unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_file._get_unset_required_fields(jsonified_request) + ).get_maven_artifact._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -19114,7 +20338,7 @@ def test_get_file_rest_required_fields(request_type=file.GetFileRequest): request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = file.File() + return_value = artifact.MavenArtifact() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -19135,29 +20359,29 @@ def test_get_file_rest_required_fields(request_type=file.GetFileRequest): response_value.status_code = 200 # Convert return value to protobuf type - return_value = file.File.pb(return_value) + return_value = artifact.MavenArtifact.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_file(request) + response = client.get_maven_artifact(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_file_rest_unset_required_fields(): +def test_get_maven_artifact_rest_unset_required_fields(): transport = transports.ArtifactRegistryRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_file._get_unset_required_fields({}) + unset_fields = transport.get_maven_artifact._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name",))) -def test_get_file_rest_flattened(): +def test_get_maven_artifact_rest_flattened(): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -19166,11 +20390,11 @@ def test_get_file_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = file.File() + return_value = artifact.MavenArtifact() # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/repositories/sample3/files/sample4" + "name": "projects/sample1/locations/sample2/repositories/sample3/mavenArtifacts/sample4" } # get truthy value for each flattened field @@ -19183,25 +20407,25 @@ def test_get_file_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = file.File.pb(return_value) + return_value = artifact.MavenArtifact.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_file(**mock_args) + client.get_maven_artifact(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/repositories/*/files/**}" + "%s/v1/{name=projects/*/locations/*/repositories/*/mavenArtifacts/*}" % client.transport._host, args[1], ) -def test_get_file_rest_flattened_error(transport: str = "rest"): +def test_get_maven_artifact_rest_flattened_error(transport: str = "rest"): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -19210,13 +20434,13 @@ def test_get_file_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_file( - file.GetFileRequest(), + client.get_maven_artifact( + artifact.GetMavenArtifactRequest(), name="name_value", ) -def test_list_tags_rest_use_cached_wrapped_rpc(): +def test_list_npm_packages_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -19230,29 +20454,129 @@ def test_list_tags_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_tags in client._transport._wrapped_methods + assert client._transport.list_npm_packages in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.list_tags] = mock_rpc + client._transport._wrapped_methods[ + client._transport.list_npm_packages + ] = mock_rpc request = {} - client.list_tags(request) + client.list_npm_packages(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_tags(request) + client.list_npm_packages(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_tags_rest_flattened(): +def test_list_npm_packages_rest_required_fields( + request_type=artifact.ListNpmPackagesRequest, +): + transport_class = transports.ArtifactRegistryRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_npm_packages._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_npm_packages._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = artifact.ListNpmPackagesResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = artifact.ListNpmPackagesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_npm_packages(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_npm_packages_rest_unset_required_fields(): + transport = transports.ArtifactRegistryRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_npm_packages._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +def test_list_npm_packages_rest_flattened(): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -19261,11 +20585,11 @@ def test_list_tags_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = tag.ListTagsResponse() + return_value = artifact.ListNpmPackagesResponse() # get arguments that satisfy an http rule for this method sample_request = { - "parent": "projects/sample1/locations/sample2/repositories/sample3/packages/sample4" + "parent": "projects/sample1/locations/sample2/repositories/sample3" } # get truthy value for each flattened field @@ -19278,25 +20602,25 @@ def test_list_tags_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = tag.ListTagsResponse.pb(return_value) + return_value = artifact.ListNpmPackagesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_tags(**mock_args) + client.list_npm_packages(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*/repositories/*/packages/*}/tags" + "%s/v1/{parent=projects/*/locations/*/repositories/*}/npmPackages" % client.transport._host, args[1], ) -def test_list_tags_rest_flattened_error(transport: str = "rest"): +def test_list_npm_packages_rest_flattened_error(transport: str = "rest"): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -19305,13 +20629,13 @@ def test_list_tags_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.list_tags( - tag.ListTagsRequest(), + client.list_npm_packages( + artifact.ListNpmPackagesRequest(), parent="parent_value", ) -def test_list_tags_rest_pager(transport: str = "rest"): +def test_list_npm_packages_rest_pager(transport: str = "rest"): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -19323,28 +20647,28 @@ def test_list_tags_rest_pager(transport: str = "rest"): # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( - tag.ListTagsResponse( - tags=[ - tag.Tag(), - tag.Tag(), - tag.Tag(), + artifact.ListNpmPackagesResponse( + npm_packages=[ + artifact.NpmPackage(), + artifact.NpmPackage(), + artifact.NpmPackage(), ], next_page_token="abc", ), - tag.ListTagsResponse( - tags=[], + artifact.ListNpmPackagesResponse( + npm_packages=[], next_page_token="def", ), - tag.ListTagsResponse( - tags=[ - tag.Tag(), + artifact.ListNpmPackagesResponse( + npm_packages=[ + artifact.NpmPackage(), ], next_page_token="ghi", ), - tag.ListTagsResponse( - tags=[ - tag.Tag(), - tag.Tag(), + artifact.ListNpmPackagesResponse( + npm_packages=[ + artifact.NpmPackage(), + artifact.NpmPackage(), ], ), ) @@ -19352,7 +20676,7 @@ def test_list_tags_rest_pager(transport: str = "rest"): response = response + response # Wrap the values into proper Response objs - response = tuple(tag.ListTagsResponse.to_json(x) for x in response) + response = tuple(artifact.ListNpmPackagesResponse.to_json(x) for x in response) return_values = tuple(Response() for i in response) for return_val, response_val in zip(return_values, response): return_val._content = response_val.encode("UTF-8") @@ -19360,21 +20684,21 @@ def test_list_tags_rest_pager(transport: str = "rest"): req.side_effect = return_values sample_request = { - "parent": "projects/sample1/locations/sample2/repositories/sample3/packages/sample4" + "parent": 
"projects/sample1/locations/sample2/repositories/sample3" } - pager = client.list_tags(request=sample_request) + pager = client.list_npm_packages(request=sample_request) results = list(pager) assert len(results) == 6 - assert all(isinstance(i, tag.Tag) for i in results) + assert all(isinstance(i, artifact.NpmPackage) for i in results) - pages = list(client.list_tags(request=sample_request).pages) + pages = list(client.list_npm_packages(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token -def test_get_tag_rest_use_cached_wrapped_rpc(): +def test_get_npm_package_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -19388,124 +20712,112 @@ def test_get_tag_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_tag in client._transport._wrapped_methods + assert client._transport.get_npm_package in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_tag] = mock_rpc + client._transport._wrapped_methods[client._transport.get_npm_package] = mock_rpc request = {} - client.get_tag(request) + client.get_npm_package(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_tag(request) + client.get_npm_package(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_tag_rest_flattened(): - client = ArtifactRegistryClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) +def test_get_npm_package_rest_required_fields( + request_type=artifact.GetNpmPackageRequest, +): + transport_class = transports.ArtifactRegistryRestTransport - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = tag.Tag() + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/repositories/sample3/packages/sample4/tags/sample5" - } + # verify fields with default values are dropped - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_npm_package._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = tag.Tag.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value + # verify required fields with default values are now present - client.get_tag(**mock_args) 
+ jsonified_request["name"] = "name_value" - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/repositories/*/packages/*/tags/*}" - % client.transport._host, - args[1], - ) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_npm_package._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" -def test_get_tag_rest_flattened_error(transport: str = "rest"): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_tag( - tag.GetTagRequest(), - name="name_value", - ) - + # Designate an appropriate value for the returned response. + return_value = artifact.NpmPackage() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result -def test_create_tag_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ArtifactRegistryClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) + response_value = Response() + response_value.status_code = 200 - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + # Convert return value to protobuf type + return_value = artifact.NpmPackage.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Ensure method has been cached - assert client._transport.create_tag in client._transport._wrapped_methods + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.create_tag] = mock_rpc + response = client.get_npm_package(request) - request = {} - client.create_tag(request) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - client.create_tag(request) +def test_get_npm_package_rest_unset_required_fields(): + transport = transports.ArtifactRegistryRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + unset_fields = transport.get_npm_package._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) -def test_create_tag_rest_flattened(): +def test_get_npm_package_rest_flattened(): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -19514,18 +20826,16 @@ def test_create_tag_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = gda_tag.Tag() + return_value = artifact.NpmPackage() # get arguments that satisfy an http rule for this method sample_request = { - "parent": "projects/sample1/locations/sample2/repositories/sample3/packages/sample4" + "name": "projects/sample1/locations/sample2/repositories/sample3/npmPackages/sample4" } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", - tag=gda_tag.Tag(name="name_value"), - tag_id="tag_id_value", + name="name_value", ) mock_args.update(sample_request) @@ -19533,25 +20843,25 @@ def test_create_tag_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = gda_tag.Tag.pb(return_value) + return_value = artifact.NpmPackage.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.create_tag(**mock_args) + client.get_npm_package(**mock_args) # Establish that the underlying 
call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*/repositories/*/packages/*}/tags" + "%s/v1/{name=projects/*/locations/*/repositories/*/npmPackages/*}" % client.transport._host, args[1], ) -def test_create_tag_rest_flattened_error(transport: str = "rest"): +def test_get_npm_package_rest_flattened_error(transport: str = "rest"): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -19560,15 +20870,13 @@ def test_create_tag_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_tag( - gda_tag.CreateTagRequest(), - parent="parent_value", - tag=gda_tag.Tag(name="name_value"), - tag_id="tag_id_value", + client.get_npm_package( + artifact.GetNpmPackageRequest(), + name="name_value", ) -def test_update_tag_rest_use_cached_wrapped_rpc(): +def test_list_python_packages_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -19582,128 +20890,131 @@ def test_update_tag_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.update_tag in client._transport._wrapped_methods + assert ( + client._transport.list_python_packages in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.update_tag] = mock_rpc + client._transport._wrapped_methods[ + client._transport.list_python_packages + ] = mock_rpc request = {} - client.update_tag(request) + client.list_python_packages(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.update_tag(request) + client.list_python_packages(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_update_tag_rest_flattened(): - client = ArtifactRegistryClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) +def test_list_python_packages_rest_required_fields( + request_type=artifact.ListPythonPackagesRequest, +): + transport_class = transports.ArtifactRegistryRestTransport - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = gda_tag.Tag() + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) - # get arguments that satisfy an http rule for this method - sample_request = { - "tag": { - "name": "projects/sample1/locations/sample2/repositories/sample3/packages/sample4/tags/sample5" - } - } + # verify fields with default values are dropped - # get truthy value for each flattened field - mock_args = dict( - tag=gda_tag.Tag(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) - mock_args.update(sample_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_python_packages._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = gda_tag.Tag.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value + # verify required fields with default values are now present - client.update_tag(**mock_args) + jsonified_request["parent"] = "parent_value" - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{tag.name=projects/*/locations/*/repositories/*/packages/*/tags/*}" - % client.transport._host, - args[1], + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_python_packages._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "page_size", + "page_token", ) + ) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" -def test_update_tag_rest_flattened_error(transport: str = "rest"): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_tag( - gda_tag.UpdateTagRequest(), - tag=gda_tag.Tag(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) - + # Designate an appropriate value for the returned response. + return_value = artifact.ListPythonPackagesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result -def test_delete_tag_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ArtifactRegistryClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) + response_value = Response() + response_value.status_code = 200 - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + # Convert return value to protobuf type + return_value = artifact.ListPythonPackagesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Ensure method has been cached - assert client._transport.delete_tag in client._transport._wrapped_methods + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.delete_tag] = mock_rpc + response = client.list_python_packages(request) - request = {} - client.delete_tag(request) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - client.delete_tag(request) +def test_list_python_packages_rest_unset_required_fields(): + transport = transports.ArtifactRegistryRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + unset_fields = transport.list_python_packages._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) -def test_delete_tag_rest_flattened(): +def test_list_python_packages_rest_flattened(): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -19712,40 +21023,42 @@ def test_delete_tag_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = None + return_value = artifact.ListPythonPackagesResponse() # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/repositories/sample3/packages/sample4/tags/sample5" + "parent": "projects/sample1/locations/sample2/repositories/sample3" } # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = "" + # Convert return value to protobuf type + return_value = artifact.ListPythonPackagesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.delete_tag(**mock_args) + client.list_python_packages(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/repositories/*/packages/*/tags/*}" + "%s/v1/{parent=projects/*/locations/*/repositories/*}/pythonPackages" % client.transport._host, args[1], ) -def test_delete_tag_rest_flattened_error(transport: str = "rest"): +def test_list_python_packages_rest_flattened_error(transport: str = "rest"): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -19754,13 +21067,78 @@ def test_delete_tag_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.delete_tag( - tag.DeleteTagRequest(), - name="name_value", + client.list_python_packages( + artifact.ListPythonPackagesRequest(), + parent="parent_value", ) -def test_set_iam_policy_rest_use_cached_wrapped_rpc(): +def test_list_python_packages_rest_pager(transport: str = "rest"): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + artifact.ListPythonPackagesResponse( + python_packages=[ + artifact.PythonPackage(), + artifact.PythonPackage(), + artifact.PythonPackage(), + ], + next_page_token="abc", + ), + artifact.ListPythonPackagesResponse( + python_packages=[], + next_page_token="def", + ), + artifact.ListPythonPackagesResponse( + python_packages=[ + artifact.PythonPackage(), + ], + next_page_token="ghi", + ), + artifact.ListPythonPackagesResponse( + python_packages=[ + artifact.PythonPackage(), + artifact.PythonPackage(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + artifact.ListPythonPackagesResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/repositories/sample3" + } + + pager = client.list_python_packages(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, artifact.PythonPackage) for i in 
results) + + pages = list(client.list_python_packages(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_get_python_package_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -19774,37 +21152,41 @@ def test_set_iam_policy_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.set_iam_policy in client._transport._wrapped_methods + assert ( + client._transport.get_python_package in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.set_iam_policy] = mock_rpc + client._transport._wrapped_methods[ + client._transport.get_python_package + ] = mock_rpc request = {} - client.set_iam_policy(request) + client.get_python_package(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.set_iam_policy(request) + client.get_python_package(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_set_iam_policy_rest_required_fields( - request_type=iam_policy_pb2.SetIamPolicyRequest, +def test_get_python_package_rest_required_fields( + request_type=artifact.GetPythonPackageRequest, ): transport_class = transports.ArtifactRegistryRestTransport request_init = {} - request_init["resource"] = "" + request_init["name"] = "" request = request_type(**request_init) - pb_request = request + pb_request = request_type.pb(request) jsonified_request = json.loads( json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) @@ -19813,21 +21195,21 @@ def test_set_iam_policy_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).set_iam_policy._get_unset_required_fields(jsonified_request) + ).get_python_package._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["resource"] = "resource_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).set_iam_policy._get_unset_required_fields(jsonified_request) + ).get_python_package._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "resource" in jsonified_request - assert jsonified_request["resource"] == "resource_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), @@ -19836,7 +21218,7 @@ def test_set_iam_policy_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the 
returned response. - return_value = policy_pb2.Policy() + return_value = artifact.PythonPackage() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -19845,48 +21227,100 @@ def test_set_iam_policy_rest_required_fields( with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. - pb_request = request + pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = artifact.PythonPackage.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.set_iam_policy(request) + response = client.get_python_package(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_set_iam_policy_rest_unset_required_fields(): +def test_get_python_package_rest_unset_required_fields(): transport = transports.ArtifactRegistryRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.set_iam_policy._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(()) - & set( - ( - "resource", - "policy", - ) + unset_fields = transport.get_python_package._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +def test_get_python_package_rest_flattened(): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + 
transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = artifact.PythonPackage() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/repositories/sample3/pythonPackages/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = artifact.PythonPackage.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_python_package(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/repositories/*/pythonPackages/*}" + % client.transport._host, + args[1], ) + + +def test_get_python_package_rest_flattened_error(transport: str = "rest"): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_python_package( + artifact.GetPythonPackageRequest(), + name="name_value", + ) + -def test_get_iam_policy_rest_use_cached_wrapped_rpc(): +def test_import_apt_artifacts_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -19900,112 +21334,37 @@ def test_get_iam_policy_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_iam_policy in client._transport._wrapped_methods + assert ( + client._transport.import_apt_artifacts in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_iam_policy] = mock_rpc + client._transport._wrapped_methods[ + client._transport.import_apt_artifacts + ] = mock_rpc request = {} - client.get_iam_policy(request) + client.import_apt_artifacts(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_iam_policy(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.import_apt_artifacts(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_iam_policy_rest_required_fields( - request_type=iam_policy_pb2.GetIamPolicyRequest, -): - transport_class = transports.ArtifactRegistryRestTransport - - request_init = {} - request_init["resource"] = "" - request = request_type(**request_init) - pb_request = request - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_iam_policy._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["resource"] = "resource_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_iam_policy._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("options",)) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "resource" in jsonified_request - assert jsonified_request["resource"] == "resource_value" - - client = ArtifactRegistryClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = policy_pb2.Policy() - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.get_iam_policy(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_get_iam_policy_rest_unset_required_fields(): - transport = transports.ArtifactRegistryRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.get_iam_policy._get_unset_required_fields({}) - assert set(unset_fields) == (set(("options",)) & set(("resource",))) - - -def test_test_iam_permissions_rest_use_cached_wrapped_rpc(): +def test_import_yum_artifacts_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -20020,7 +21379,7 @@ def test_test_iam_permissions_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.test_iam_permissions in client._transport._wrapped_methods + client._transport.import_yum_artifacts in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -20029,117 
+21388,27 @@ def test_test_iam_permissions_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.test_iam_permissions + client._transport.import_yum_artifacts ] = mock_rpc request = {} - client.test_iam_permissions(request) + client.import_yum_artifacts(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.test_iam_permissions(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.import_yum_artifacts(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_test_iam_permissions_rest_required_fields( - request_type=iam_policy_pb2.TestIamPermissionsRequest, -): - transport_class = transports.ArtifactRegistryRestTransport - - request_init = {} - request_init["resource"] = "" - request_init["permissions"] = "" - request = request_type(**request_init) - pb_request = request - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).test_iam_permissions._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["resource"] = "resource_value" - jsonified_request["permissions"] = "permissions_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).test_iam_permissions._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "resource" in jsonified_request - assert 
jsonified_request["resource"] == "resource_value" - assert "permissions" in jsonified_request - assert jsonified_request["permissions"] == "permissions_value" - - client = ArtifactRegistryClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = iam_policy_pb2.TestIamPermissionsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request - transcode_result = { - "uri": "v1/sample_method", - "method": "post", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.test_iam_permissions(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_test_iam_permissions_rest_unset_required_fields(): - transport = transports.ArtifactRegistryRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.test_iam_permissions._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(()) - & set( - ( - "resource", - "permissions", - ) - ) - ) - - -def test_get_project_settings_rest_use_cached_wrapped_rpc(): +def 
test_list_repositories_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -20153,9 +21422,7 @@ def test_get_project_settings_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.get_project_settings in client._transport._wrapped_methods - ) + assert client._transport.list_repositories in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -20163,29 +21430,29 @@ def test_get_project_settings_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.get_project_settings + client._transport.list_repositories ] = mock_rpc request = {} - client.get_project_settings(request) + client.list_repositories(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_project_settings(request) + client.list_repositories(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_project_settings_rest_required_fields( - request_type=settings.GetProjectSettingsRequest, +def test_list_repositories_rest_required_fields( + request_type=repository.ListRepositoriesRequest, ): transport_class = transports.ArtifactRegistryRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -20196,21 +21463,30 @@ def test_get_project_settings_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_project_settings._get_unset_required_fields(jsonified_request) + ).list_repositories._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_project_settings._get_unset_required_fields(jsonified_request) + ).list_repositories._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), @@ -20219,7 +21495,7 @@ def test_get_project_settings_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = settings.ProjectSettings() + return_value = repository.ListRepositoriesResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -20240,29 +21516,39 @@ def test_get_project_settings_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = settings.ProjectSettings.pb(return_value) + return_value = repository.ListRepositoriesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_project_settings(request) + response = client.list_repositories(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_project_settings_rest_unset_required_fields(): +def test_list_repositories_rest_unset_required_fields(): transport = transports.ArtifactRegistryRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_project_settings._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = 
transport.list_repositories._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) -def test_get_project_settings_rest_flattened(): +def test_list_repositories_rest_flattened(): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -20271,14 +21557,14 @@ def test_get_project_settings_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = settings.ProjectSettings() + return_value = repository.ListRepositoriesResponse() # get arguments that satisfy an http rule for this method - sample_request = {"name": "projects/sample1/projectSettings"} + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", ) mock_args.update(sample_request) @@ -20286,23 +21572,25 @@ def test_get_project_settings_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = settings.ProjectSettings.pb(return_value) + return_value = repository.ListRepositoriesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_project_settings(**mock_args) + client.list_repositories(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/projectSettings}" % client.transport._host, args[1] + "%s/v1/{parent=projects/*/locations/*}/repositories" + % client.transport._host, + args[1], ) -def test_get_project_settings_rest_flattened_error(transport: str = "rest"): +def test_list_repositories_rest_flattened_error(transport: str = "rest"): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -20311,54 +21599,195 @@ def test_get_project_settings_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_project_settings( - settings.GetProjectSettingsRequest(), - name="name_value", - ) - - -def test_update_project_settings_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ArtifactRegistryClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._transport.update_project_settings - in client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. + client.list_repositories( + repository.ListRepositoriesRequest(), + parent="parent_value", ) - client._transport._wrapped_methods[ - client._transport.update_project_settings - ] = mock_rpc - request = {} - client.update_project_settings(request) - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 +def test_list_repositories_rest_pager(transport: str = "rest"): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - client.update_project_settings(request) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + repository.ListRepositoriesResponse( + repositories=[ + repository.Repository(), + repository.Repository(), + repository.Repository(), + ], + next_page_token="abc", + ), + repository.ListRepositoriesResponse( + repositories=[], + next_page_token="def", + ), + repository.ListRepositoriesResponse( + repositories=[ + repository.Repository(), + ], + next_page_token="ghi", + ), + repository.ListRepositoriesResponse( + repositories=[ + repository.Repository(), + repository.Repository(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + repository.ListRepositoriesResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_repositories(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, repository.Repository) for i in results) + + pages = list(client.list_repositories(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def 
test_get_repository_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_repository in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_repository] = mock_rpc + + request = {} + client.get_repository(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_repository(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_update_project_settings_rest_flattened(): +def test_get_repository_rest_required_fields( + request_type=repository.GetRepositoryRequest, +): + transport_class = transports.ArtifactRegistryRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_repository._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = 
transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_repository._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = repository.Repository() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = repository.Repository.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_repository(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_repository_rest_unset_required_fields(): + transport = transports.ArtifactRegistryRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_repository._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +def test_get_repository_rest_flattened(): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -20367,17 +21796,16 @@ def test_update_project_settings_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = settings.ProjectSettings() + return_value = repository.Repository() # get arguments that satisfy an http rule for this method sample_request = { - "project_settings": {"name": "projects/sample1/projectSettings"} + "name": "projects/sample1/locations/sample2/repositories/sample3" } # get truthy value for each flattened field mock_args = dict( - project_settings=settings.ProjectSettings(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + name="name_value", ) mock_args.update(sample_request) @@ -20385,25 +21813,25 @@ def test_update_project_settings_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = settings.ProjectSettings.pb(return_value) + return_value = repository.Repository.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.update_project_settings(**mock_args) + client.get_repository(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{project_settings.name=projects/*/projectSettings}" + "%s/v1/{name=projects/*/locations/*/repositories/*}" % client.transport._host, args[1], ) -def test_update_project_settings_rest_flattened_error(transport: str = "rest"): +def test_get_repository_rest_flattened_error(transport: str = "rest"): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -20412,14 +21840,13 @@ def test_update_project_settings_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.update_project_settings( - settings.UpdateProjectSettingsRequest(), - project_settings=settings.ProjectSettings(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.get_repository( + repository.GetRepositoryRequest(), + name="name_value", ) -def test_get_vpcsc_config_rest_use_cached_wrapped_rpc(): +def test_create_repository_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -20433,7 +21860,7 @@ def test_get_vpcsc_config_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_vpcsc_config in client._transport._wrapped_methods + assert client._transport.create_repository in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -20441,29 +21868,34 @@ def test_get_vpcsc_config_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.get_vpcsc_config + client._transport.create_repository ] = mock_rpc request = {} - client.get_vpcsc_config(request) + client.create_repository(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_vpcsc_config(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_repository(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_vpcsc_config_rest_required_fields( - request_type=vpcsc_config.GetVPCSCConfigRequest, +def test_create_repository_rest_required_fields( + request_type=gda_repository.CreateRepositoryRequest, ): transport_class = transports.ArtifactRegistryRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" + request_init["repository_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -20471,24 +21903,32 @@ def test_get_vpcsc_config_rest_required_fields( ) # verify fields with default values are dropped + assert "repositoryId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_vpcsc_config._get_unset_required_fields(jsonified_request) + ).create_repository._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + assert "repositoryId" in jsonified_request + assert jsonified_request["repositoryId"] == request_init["repository_id"] - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" + jsonified_request["repositoryId"] = "repository_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_vpcsc_config._get_unset_required_fields(jsonified_request) + ).create_repository._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("repository_id",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "repositoryId" in jsonified_request + assert jsonified_request["repositoryId"] == "repository_id_value" client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), @@ -20497,7 +21937,7 @@ def test_get_vpcsc_config_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = vpcsc_config.VPCSCConfig() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -20509,38 +21949,51 @@ def test_get_vpcsc_config_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = vpcsc_config.VPCSCConfig.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_vpcsc_config(request) + response = client.create_repository(request) - expected_params = [("$alt", "json;enum-encoding=int")] + expected_params = [ + ( + "repositoryId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def 
test_get_vpcsc_config_rest_unset_required_fields(): +def test_create_repository_rest_unset_required_fields(): transport = transports.ArtifactRegistryRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_vpcsc_config._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.create_repository._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("repositoryId",)) + & set( + ( + "parent", + "repositoryId", + "repository", + ) + ) + ) -def test_get_vpcsc_config_rest_flattened(): +def test_create_repository_rest_flattened(): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -20549,39 +22002,44 @@ def test_get_vpcsc_config_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = vpcsc_config.VPCSCConfig() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {"name": "projects/sample1/locations/sample2/vpcscConfig"} + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", + repository=gda_repository.Repository( + maven_config=gda_repository.Repository.MavenRepositoryConfig( + allow_snapshot_overwrites=True + ) + ), + repository_id="repository_id_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = vpcsc_config.VPCSCConfig.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_vpcsc_config(**mock_args) + client.create_repository(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/vpcscConfig}" % client.transport._host, + "%s/v1/{parent=projects/*/locations/*}/repositories" + % client.transport._host, args[1], ) -def test_get_vpcsc_config_rest_flattened_error(transport: str = "rest"): +def test_create_repository_rest_flattened_error(transport: str = "rest"): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -20590,13 +22048,19 @@ def test_get_vpcsc_config_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_vpcsc_config( - vpcsc_config.GetVPCSCConfigRequest(), - name="name_value", + client.create_repository( + gda_repository.CreateRepositoryRequest(), + parent="parent_value", + repository=gda_repository.Repository( + maven_config=gda_repository.Repository.MavenRepositoryConfig( + allow_snapshot_overwrites=True + ) + ), + repository_id="repository_id_value", ) -def test_update_vpcsc_config_rest_use_cached_wrapped_rpc(): +def test_update_repository_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -20610,9 +22074,7 @@ def test_update_vpcsc_config_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.update_vpcsc_config in client._transport._wrapped_methods - ) + assert client._transport.update_repository in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -20620,23 +22082,23 @@ def test_update_vpcsc_config_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.update_vpcsc_config + client._transport.update_repository ] = mock_rpc request = {} - client.update_vpcsc_config(request) + client.update_repository(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.update_vpcsc_config(request) + client.update_repository(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_update_vpcsc_config_rest_flattened(): +def test_update_repository_rest_flattened(): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -20645,16 +22107,22 @@ def test_update_vpcsc_config_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = gda_vpcsc_config.VPCSCConfig() + return_value = gda_repository.Repository() # get arguments that satisfy an http rule for this method sample_request = { - "vpcsc_config": {"name": "projects/sample1/locations/sample2/vpcscConfig"} + "repository": { + "name": "projects/sample1/locations/sample2/repositories/sample3" + } } # get truthy value for each flattened field mock_args = dict( - vpcsc_config=gda_vpcsc_config.VPCSCConfig(name="name_value"), + repository=gda_repository.Repository( + maven_config=gda_repository.Repository.MavenRepositoryConfig( + allow_snapshot_overwrites=True + ) + ), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) @@ -20663,25 +22131,25 @@ def test_update_vpcsc_config_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = gda_vpcsc_config.VPCSCConfig.pb(return_value) + return_value = gda_repository.Repository.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.update_vpcsc_config(**mock_args) + client.update_repository(**mock_args) # Establish that the underlying call was made with the 
expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{vpcsc_config.name=projects/*/locations/*/vpcscConfig}" + "%s/v1/{repository.name=projects/*/locations/*/repositories/*}" % client.transport._host, args[1], ) -def test_update_vpcsc_config_rest_flattened_error(transport: str = "rest"): +def test_update_repository_rest_flattened_error(transport: str = "rest"): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -20690,1967 +22158,9431 @@ def test_update_vpcsc_config_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_vpcsc_config( - gda_vpcsc_config.UpdateVPCSCConfigRequest(), - vpcsc_config=gda_vpcsc_config.VPCSCConfig(name="name_value"), + client.update_repository( + gda_repository.UpdateRepositoryRequest(), + repository=gda_repository.Repository( + maven_config=gda_repository.Repository.MavenRepositoryConfig( + allow_snapshot_overwrites=True + ) + ), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.ArtifactRegistryGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): +def test_delete_repository_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. 
- transport = transports.ArtifactRegistryGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = ArtifactRegistryClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, + transport="rest", ) - # It is an error to provide an api_key and a transport instance. - transport = transports.ArtifactRegistryGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = ArtifactRegistryClient( - client_options=options, - transport=transport, - ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - # It is an error to provide an api_key and a credential. - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = ArtifactRegistryClient( - client_options=options, credentials=ga_credentials.AnonymousCredentials() - ) + # Ensure method has been cached + assert client._transport.delete_repository in client._transport._wrapped_methods - # It is an error to provide scopes and a transport instance. - transport = transports.ArtifactRegistryGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = ArtifactRegistryClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. ) + client._transport._wrapped_methods[ + client._transport.delete_repository + ] = mock_rpc + request = {} + client.delete_repository(request) -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. 
- transport = transports.ArtifactRegistryGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = ArtifactRegistryClient(transport=transport) - assert client.transport is transport - - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.ArtifactRegistryGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.ArtifactRegistryGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() -@pytest.mark.parametrize( - "transport_class", - [ - transports.ArtifactRegistryGrpcTransport, - transports.ArtifactRegistryGrpcAsyncIOTransport, - transports.ArtifactRegistryRestTransport, - ], -) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. 
- with mock.patch.object(google.auth, "default") as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() + client.delete_repository(request) + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 -def test_transport_kind_grpc(): - transport = ArtifactRegistryClient.get_transport_class("grpc")( - credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "grpc" +def test_delete_repository_rest_required_fields( + request_type=repository.DeleteRepositoryRequest, +): + transport_class = transports.ArtifactRegistryRestTransport -def test_initialize_client_w_grpc(): - client = ArtifactRegistryClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc" + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) - assert client is not None + # verify fields with default values are dropped -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_docker_images_empty_call_grpc(): - client = ArtifactRegistryClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_repository._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.list_docker_images), "__call__" - ) as call: - call.return_value = artifact.ListDockerImagesResponse() - client.list_docker_images(request=None) + # verify required fields with default values are now present - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = artifact.ListDockerImagesRequest() + jsonified_request["name"] = "name_value" - assert args[0] == request_msg + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_repository._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_docker_image_empty_call_grpc(): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) + request = request_type(**request_init) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.get_docker_image), "__call__") as call: - call.return_value = artifact.DockerImage() - client.get_docker_image(request=None) + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = artifact.GetDockerImageRequest() + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) - assert args[0] == request_msg + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_repository(request) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_maven_artifacts_empty_call_grpc(): - client = ArtifactRegistryClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_maven_artifacts), "__call__" - ) as call: - call.return_value = artifact.ListMavenArtifactsResponse() - client.list_maven_artifacts(request=None) - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = artifact.ListMavenArtifactsRequest() +def test_delete_repository_rest_unset_required_fields(): + transport = transports.ArtifactRegistryRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) - assert args[0] == request_msg + unset_fields = transport.delete_repository._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_maven_artifact_empty_call_grpc(): +def test_delete_repository_rest_flattened(): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_maven_artifact), "__call__" - ) as call: - call.return_value = artifact.MavenArtifact() - client.get_maven_artifact(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = artifact.GetMavenArtifactRequest() - - assert args[0] == request_msg + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/repositories/sample3" + } -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-def test_list_npm_packages_empty_call_grpc(): - client = ArtifactRegistryClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_npm_packages), "__call__" - ) as call: - call.return_value = artifact.ListNpmPackagesResponse() - client.list_npm_packages(request=None) + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = artifact.ListNpmPackagesRequest() + client.delete_repository(**mock_args) - assert args[0] == request_msg + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/repositories/*}" + % client.transport._host, + args[1], + ) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_npm_package_empty_call_grpc(): +def test_delete_repository_rest_flattened_error(transport: str = "rest"): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport=transport, ) - # Mock the actual call, and fake the request. 
- with mock.patch.object(type(client.transport.get_npm_package), "__call__") as call: - call.return_value = artifact.NpmPackage() - client.get_npm_package(request=None) + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_repository( + repository.DeleteRepositoryRequest(), + name="name_value", + ) - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = artifact.GetNpmPackageRequest() - assert args[0] == request_msg +def test_list_packages_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_python_packages_empty_call_grpc(): - client = ArtifactRegistryClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + # Ensure method has been cached + assert client._transport.list_packages in client._transport._wrapped_methods - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_python_packages), "__call__" - ) as call: - call.return_value = artifact.ListPythonPackagesResponse() - client.list_python_packages(request=None) + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.list_packages] = mock_rpc - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = artifact.ListPythonPackagesRequest() + request = {} + client.list_packages(request) - assert args[0] == request_msg + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + client.list_packages(request) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_python_package_empty_call_grpc(): - client = ArtifactRegistryClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_python_package), "__call__" - ) as call: - call.return_value = artifact.PythonPackage() - client.get_python_package(request=None) - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = artifact.GetPythonPackageRequest() +def test_list_packages_rest_required_fields(request_type=package.ListPackagesRequest): + transport_class = transports.ArtifactRegistryRestTransport - assert args[0] == request_msg + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + # verify fields with default values are dropped -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-def test_import_apt_artifacts_empty_call_grpc(): - client = ArtifactRegistryClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_packages._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.import_apt_artifacts), "__call__" - ) as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.import_apt_artifacts(request=None) + # verify required fields with default values are now present - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = apt_artifact.ImportAptArtifactsRequest() + jsonified_request["parent"] = "parent_value" - assert args[0] == request_msg + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_packages._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_import_yum_artifacts_empty_call_grpc(): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) + request = request_type(**request_init) - # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.import_yum_artifacts), "__call__" - ) as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.import_yum_artifacts(request=None) + # Designate an appropriate value for the returned response. + return_value = package.ListPackagesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = yum_artifact.ImportYumArtifactsRequest() + response_value = Response() + response_value.status_code = 200 - assert args[0] == request_msg + # Convert return value to protobuf type + return_value = package.ListPackagesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_repositories_empty_call_grpc(): - client = ArtifactRegistryClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + response = client.list_packages(request) - # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.list_repositories), "__call__" - ) as call: - call.return_value = repository.ListRepositoriesResponse() - client.list_repositories(request=None) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = repository.ListRepositoriesRequest() - assert args[0] == request_msg +def test_list_packages_rest_unset_required_fields(): + transport = transports.ArtifactRegistryRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_packages._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_repository_empty_call_grpc(): +def test_list_packages_rest_flattened(): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.get_repository), "__call__") as call: - call.return_value = repository.Repository() - client.get_repository(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = repository.GetRepositoryRequest() - - assert args[0] == request_msg + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = package.ListPackagesResponse() + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/repositories/sample3" + } -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_repository_empty_call_grpc(): - client = ArtifactRegistryClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_repository), "__call__" - ) as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.create_repository(request=None) + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = package.ListPackagesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = gda_repository.CreateRepositoryRequest() + client.list_packages(**mock_args) - assert args[0] == request_msg + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/repositories/*}/packages" + % client.transport._host, + args[1], + ) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-def test_update_repository_empty_call_grpc(): +def test_list_packages_rest_flattened_error(transport: str = "rest"): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport=transport, ) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_repository), "__call__" - ) as call: - call.return_value = gda_repository.Repository() - client.update_repository(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = gda_repository.UpdateRepositoryRequest() - - assert args[0] == request_msg + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_packages( + package.ListPackagesRequest(), + parent="parent_value", + ) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_repository_empty_call_grpc(): +def test_list_packages_rest_pager(transport: str = "rest"): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport=transport, ) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_repository), "__call__" - ) as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.delete_repository(request=None) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + package.ListPackagesResponse( + packages=[ + package.Package(), + package.Package(), + package.Package(), + ], + next_page_token="abc", + ), + package.ListPackagesResponse( + packages=[], + next_page_token="def", + ), + package.ListPackagesResponse( + packages=[ + package.Package(), + ], + next_page_token="ghi", + ), + package.ListPackagesResponse( + packages=[ + package.Package(), + package.Package(), + ], + ), + ) + # Two responses for two calls + response = response + response - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = repository.DeleteRepositoryRequest() + # Wrap the values into proper Response objs + response = tuple(package.ListPackagesResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values - assert args[0] == request_msg + sample_request = { + "parent": "projects/sample1/locations/sample2/repositories/sample3" + } + pager = client.list_packages(request=sample_request) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_packages_empty_call_grpc(): - client = ArtifactRegistryClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, package.Package) for i in results) - # Mock the actual call, and fake the request. 
- with mock.patch.object(type(client.transport.list_packages), "__call__") as call: - call.return_value = package.ListPackagesResponse() - client.list_packages(request=None) + pages = list(client.list_packages(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = package.ListPackagesRequest() - assert args[0] == request_msg +def test_get_package_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_package_empty_call_grpc(): - client = ArtifactRegistryClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + # Ensure method has been cached + assert client._transport.get_package in client._transport._wrapped_methods - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.get_package), "__call__") as call: - call.return_value = package.Package() - client.get_package(request=None) + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_package] = mock_rpc - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = package.GetPackageRequest() + request = {} + client.get_package(request) - assert args[0] == request_msg + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + client.get_package(request) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_package_empty_call_grpc(): - client = ArtifactRegistryClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.delete_package), "__call__") as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.delete_package(request=None) - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = package.DeletePackageRequest() +def test_get_package_rest_required_fields(request_type=package.GetPackageRequest): + transport_class = transports.ArtifactRegistryRestTransport - assert args[0] == request_msg + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + # verify fields with default values are dropped -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-def test_list_versions_empty_call_grpc(): - client = ArtifactRegistryClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_package._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.list_versions), "__call__") as call: - call.return_value = version.ListVersionsResponse() - client.list_versions(request=None) + # verify required fields with default values are now present - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = version.ListVersionsRequest() + jsonified_request["name"] = "name_value" - assert args[0] == request_msg + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_package._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_version_empty_call_grpc(): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) + request = request_type(**request_init) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.get_version), "__call__") as call: - call.return_value = version.Version() - client.get_version(request=None) + # Designate an appropriate value for the returned response. + return_value = package.Package() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = version.GetVersionRequest() + response_value = Response() + response_value.status_code = 200 - assert args[0] == request_msg + # Convert return value to protobuf type + return_value = package.Package.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_version_empty_call_grpc(): - client = ArtifactRegistryClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + response = client.get_package(request) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.delete_version), "__call__") as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.delete_version(request=None) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = version.DeleteVersionRequest() - assert args[0] == request_msg +def test_get_package_rest_unset_required_fields(): + transport = transports.ArtifactRegistryRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_package._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_batch_delete_versions_empty_call_grpc(): +def test_get_package_rest_flattened(): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.batch_delete_versions), "__call__" - ) as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.batch_delete_versions(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = version.BatchDeleteVersionsRequest() - - assert args[0] == request_msg + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = package.Package() + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/repositories/sample3/packages/sample4" + } -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-def test_list_files_empty_call_grpc(): - client = ArtifactRegistryClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.list_files), "__call__") as call: - call.return_value = file.ListFilesResponse() - client.list_files(request=None) + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = package.Package.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = file.ListFilesRequest() + client.get_package(**mock_args) - assert args[0] == request_msg + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/repositories/*/packages/*}" + % client.transport._host, + args[1], + ) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_file_empty_call_grpc(): +def test_get_package_rest_flattened_error(transport: str = "rest"): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport=transport, ) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.get_file), "__call__") as call: - call.return_value = file.File() - client.get_file(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = file.GetFileRequest() + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_package( + package.GetPackageRequest(), + name="name_value", + ) - assert args[0] == request_msg +def test_delete_package_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_tags_empty_call_grpc(): - client = ArtifactRegistryClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.list_tags), "__call__") as call: - call.return_value = tag.ListTagsResponse() - client.list_tags(request=None) + # Ensure method has been cached + assert client._transport.delete_package in client._transport._wrapped_methods - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = tag.ListTagsRequest() + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_package] = mock_rpc - assert args[0] == request_msg + request = {} + client.delete_package(request) + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_tag_empty_call_grpc(): - client = ArtifactRegistryClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.get_tag), "__call__") as call: - call.return_value = tag.Tag() - client.get_tag(request=None) + client.delete_package(request) - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = tag.GetTagRequest() + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 - assert args[0] == request_msg +def test_delete_package_rest_required_fields(request_type=package.DeletePackageRequest): + transport_class = transports.ArtifactRegistryRestTransport -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_tag_empty_call_grpc(): - client = ArtifactRegistryClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.create_tag), "__call__") as call: - call.return_value = gda_tag.Tag() - client.create_tag(request=None) + # verify fields with default values are dropped - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = gda_tag.CreateTagRequest() + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_package._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - assert args[0] == request_msg + # verify required fields with default values are now present + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_package._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_tag_empty_call_grpc(): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) + request = request_type(**request_init) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.update_tag), "__call__") as call: - call.return_value = gda_tag.Tag() - client.update_tag(request=None) + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = gda_tag.UpdateTagRequest() + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) - assert args[0] == request_msg + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_package(request) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_tag_empty_call_grpc(): - client = ArtifactRegistryClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.delete_tag), "__call__") as call: - call.return_value = None - client.delete_tag(request=None) - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = tag.DeleteTagRequest() +def test_delete_package_rest_unset_required_fields(): + transport = transports.ArtifactRegistryRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) - assert args[0] == request_msg + unset_fields = transport.delete_package._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-def test_set_iam_policy_empty_call_grpc(): +def test_delete_package_rest_flattened(): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - call.return_value = policy_pb2.Policy() - client.set_iam_policy(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = iam_policy_pb2.SetIamPolicyRequest() - - assert args[0] == request_msg + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/repositories/sample3/packages/sample4" + } -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_iam_policy_empty_call_grpc(): - client = ArtifactRegistryClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) - # Mock the actual call, and fake the request. 
- with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: - call.return_value = policy_pb2.Policy() - client.get_iam_policy(request=None) + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = iam_policy_pb2.GetIamPolicyRequest() + client.delete_package(**mock_args) - assert args[0] == request_msg + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/repositories/*/packages/*}" + % client.transport._host, + args[1], + ) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_test_iam_permissions_empty_call_grpc(): +def test_delete_package_rest_flattened_error(transport: str = "rest"): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport=transport, ) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - call.return_value = iam_policy_pb2.TestIamPermissionsResponse() - client.test_iam_permissions(request=None) + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_package( + package.DeletePackageRequest(), + name="name_value", + ) - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = iam_policy_pb2.TestIamPermissionsRequest() - assert args[0] == request_msg +def test_list_versions_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_project_settings_empty_call_grpc(): - client = ArtifactRegistryClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + # Ensure method has been cached + assert client._transport.list_versions in client._transport._wrapped_methods - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_project_settings), "__call__" - ) as call: - call.return_value = settings.ProjectSettings() - client.get_project_settings(request=None) + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_versions] = mock_rpc - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = settings.GetProjectSettingsRequest() + request = {} + client.list_versions(request) - assert args[0] == request_msg + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + client.list_versions(request) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. 
request == None and no flattened fields passed, work. -def test_update_project_settings_empty_call_grpc(): + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_versions_rest_flattened(): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_project_settings), "__call__" - ) as call: - call.return_value = settings.ProjectSettings() - client.update_project_settings(request=None) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = version.ListVersionsResponse() - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = settings.UpdateProjectSettingsRequest() + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/repositories/sample3/packages/sample4" + } - assert args[0] == request_msg + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = version.ListVersionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-def test_get_vpcsc_config_empty_call_grpc(): + client.list_versions(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/repositories/*/packages/*}/versions" + % client.transport._host, + args[1], + ) + + +def test_list_versions_rest_flattened_error(transport: str = "rest"): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport=transport, ) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.get_vpcsc_config), "__call__") as call: - call.return_value = vpcsc_config.VPCSCConfig() - client.get_vpcsc_config(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = vpcsc_config.GetVPCSCConfigRequest() - - assert args[0] == request_msg + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_versions( + version.ListVersionsRequest(), + parent="parent_value", + ) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_vpcsc_config_empty_call_grpc(): +def test_list_versions_rest_pager(transport: str = "rest"): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport=transport, ) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_vpcsc_config), "__call__" - ) as call: - call.return_value = gda_vpcsc_config.VPCSCConfig() - client.update_vpcsc_config(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = gda_vpcsc_config.UpdateVPCSCConfigRequest() - - assert args[0] == request_msg + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + version.ListVersionsResponse( + versions=[ + version.Version(), + version.Version(), + version.Version(), + ], + next_page_token="abc", + ), + version.ListVersionsResponse( + versions=[], + next_page_token="def", + ), + version.ListVersionsResponse( + versions=[ + version.Version(), + ], + next_page_token="ghi", + ), + version.ListVersionsResponse( + versions=[ + version.Version(), + version.Version(), + ], + ), + ) + # Two responses for two calls + response = response + response + # Wrap the values into proper Response objs + response = tuple(version.ListVersionsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values -def test_transport_kind_grpc_asyncio(): - transport = ArtifactRegistryAsyncClient.get_transport_class("grpc_asyncio")( - credentials=async_anonymous_credentials() - ) - assert transport.kind == "grpc_asyncio" + sample_request = { + "parent": "projects/sample1/locations/sample2/repositories/sample3/packages/sample4" + } + pager = client.list_versions(request=sample_request) -def test_initialize_client_w_grpc_asyncio(): - client = ArtifactRegistryAsyncClient( - credentials=async_anonymous_credentials(), transport="grpc_asyncio" - ) - assert client is not None + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, version.Version) for i in results) + 
pages = list(client.list_versions(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_docker_images_empty_call_grpc_asyncio(): - client = ArtifactRegistryAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_docker_images), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - artifact.ListDockerImagesResponse( - next_page_token="next_page_token_value", - ) +def test_get_version_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - await client.list_docker_images(request=None) - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = artifact.ListDockerImagesRequest() + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - assert args[0] == request_msg + # Ensure method has been cached + assert client._transport.get_version in client._transport._wrapped_methods + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.get_version] = mock_rpc -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_docker_image_empty_call_grpc_asyncio(): - client = ArtifactRegistryAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) + request = {} + client.get_version(request) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.get_docker_image), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - artifact.DockerImage( - name="name_value", - uri="uri_value", - tags=["tags_value"], - image_size_bytes=1699, - media_type="media_type_value", - ) - ) - await client.get_docker_image(request=None) + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = artifact.GetDockerImageRequest() + client.get_version(request) - assert args[0] == request_msg + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_maven_artifacts_empty_call_grpc_asyncio(): - client = ArtifactRegistryAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", +def test_get_version_rest_flattened(): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.list_maven_artifacts), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - artifact.ListMavenArtifactsResponse( - next_page_token="next_page_token_value", - ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = version.Version() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/repositories/sample3/packages/sample4/versions/sample5" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", ) - await client.list_maven_artifacts(request=None) + mock_args.update(sample_request) - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = artifact.ListMavenArtifactsRequest() + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = version.Version.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - assert args[0] == request_msg + client.get_version(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/repositories/*/packages/*/versions/*}" + % client.transport._host, + args[1], + ) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-@pytest.mark.asyncio -async def test_get_maven_artifact_empty_call_grpc_asyncio(): - client = ArtifactRegistryAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", +def test_get_version_rest_flattened_error(transport: str = "rest"): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_maven_artifact), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - artifact.MavenArtifact( - name="name_value", - pom_uri="pom_uri_value", - group_id="group_id_value", - artifact_id="artifact_id_value", - version="version_value", - ) + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_version( + version.GetVersionRequest(), + name="name_value", ) - await client.get_maven_artifact(request=None) - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = artifact.GetMavenArtifactRequest() - assert args[0] == request_msg +def test_delete_version_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-@pytest.mark.asyncio -async def test_list_npm_packages_empty_call_grpc_asyncio(): - client = ArtifactRegistryAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) + # Ensure method has been cached + assert client._transport.delete_version in client._transport._wrapped_methods - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_npm_packages), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - artifact.ListNpmPackagesResponse( - next_page_token="next_page_token_value", - ) + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. ) - await client.list_npm_packages(request=None) + client._transport._wrapped_methods[client._transport.delete_version] = mock_rpc - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = artifact.ListNpmPackagesRequest() + request = {} + client.delete_version(request) - assert args[0] == request_msg + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_npm_package_empty_call_grpc_asyncio(): - client = ArtifactRegistryAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) + client.delete_version(request) - # Mock the actual call, and fake the request. 
- with mock.patch.object(type(client.transport.get_npm_package), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - artifact.NpmPackage( - name="name_value", - package_name="package_name_value", - version="version_value", - tags=["tags_value"], - ) - ) - await client.get_npm_package(request=None) + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = artifact.GetNpmPackageRequest() - assert args[0] == request_msg +def test_delete_version_rest_flattened(): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_python_packages_empty_call_grpc_asyncio(): - client = ArtifactRegistryAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/repositories/sample3/packages/sample4/versions/sample5" + } - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_python_packages), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - artifact.ListPythonPackagesResponse( - next_page_token="next_page_token_value", - ) + # get truthy value for each flattened field + mock_args = dict( + name="name_value", ) - await client.list_python_packages(request=None) + mock_args.update(sample_request) - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = artifact.ListPythonPackagesRequest() + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - assert args[0] == request_msg + client.delete_version(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/repositories/*/packages/*/versions/*}" + % client.transport._host, + args[1], + ) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_python_package_empty_call_grpc_asyncio(): - client = ArtifactRegistryAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", +def test_delete_version_rest_flattened_error(transport: str = "rest"): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_python_package), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - artifact.PythonPackage( - name="name_value", - uri="uri_value", - package_name="package_name_value", - version="version_value", - ) + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_version( + version.DeleteVersionRequest(), + name="name_value", ) - await client.get_python_package(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = artifact.GetPythonPackageRequest() - assert args[0] == request_msg +def test_batch_delete_versions_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_import_apt_artifacts_empty_call_grpc_asyncio(): - client = ArtifactRegistryAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.import_apt_artifacts), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + # Ensure method has been cached + assert ( + client._transport.batch_delete_versions + in client._transport._wrapped_methods ) - await client.import_apt_artifacts(request=None) - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = apt_artifact.ImportAptArtifactsRequest() + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.batch_delete_versions + ] = mock_rpc - assert args[0] == request_msg + request = {} + client.batch_delete_versions(request) + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_import_yum_artifacts_empty_call_grpc_asyncio(): - client = ArtifactRegistryAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.import_yum_artifacts), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) - await client.import_yum_artifacts(request=None) + client.batch_delete_versions(request) - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = yum_artifact.ImportYumArtifactsRequest() + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 - assert args[0] == request_msg +def test_batch_delete_versions_rest_required_fields( + request_type=version.BatchDeleteVersionsRequest, +): + transport_class = transports.ArtifactRegistryRestTransport -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_repositories_empty_call_grpc_asyncio(): - client = ArtifactRegistryAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", + request_init = {} + request_init["names"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_repositories), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - repository.ListRepositoriesResponse( - next_page_token="next_page_token_value", - ) - ) - await client.list_repositories(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = repository.ListRepositoriesRequest() + # verify fields with default values are dropped - assert args[0] == request_msg + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).batch_delete_versions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + # verify required fields with default values are now present -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_repository_empty_call_grpc_asyncio(): - client = ArtifactRegistryAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) + jsonified_request["names"] = "names_value" - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.get_repository), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - repository.Repository( - name="name_value", - format_=repository.Repository.Format.DOCKER, - description="description_value", - kms_key_name="kms_key_name_value", - mode=repository.Repository.Mode.STANDARD_REPOSITORY, - size_bytes=1089, - satisfies_pzs=True, - cleanup_policy_dry_run=True, - ) - ) - await client.get_repository(request=None) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).batch_delete_versions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = repository.GetRepositoryRequest() + # verify required fields with non-default values are left alone + assert "names" in jsonified_request + assert jsonified_request["names"] == "names_value" - assert args[0] == request_msg + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_create_repository_empty_call_grpc_asyncio(): - client = ArtifactRegistryAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_repository), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) - await client.create_repository(request=None) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = gda_repository.CreateRepositoryRequest() + response = client.batch_delete_versions(request) - assert args[0] == request_msg + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_update_repository_empty_call_grpc_asyncio(): - client = ArtifactRegistryAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", +def test_batch_delete_versions_rest_unset_required_fields(): + transport = transports.ArtifactRegistryRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_repository), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - gda_repository.Repository( - name="name_value", - format_=gda_repository.Repository.Format.DOCKER, - description="description_value", - kms_key_name="kms_key_name_value", - mode=gda_repository.Repository.Mode.STANDARD_REPOSITORY, - size_bytes=1089, - satisfies_pzs=True, - cleanup_policy_dry_run=True, - ) - ) - await client.update_repository(request=None) + unset_fields = transport.batch_delete_versions._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("names",))) - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = gda_repository.UpdateRepositoryRequest() - assert args[0] == request_msg +def test_batch_delete_versions_rest_flattened(): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_repository_empty_call_grpc_asyncio(): - client = ArtifactRegistryAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/repositories/sample3/packages/sample4" + } - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_repository), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + names=["names_value"], ) - await client.delete_repository(request=None) + mock_args.update(sample_request) - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = repository.DeleteRepositoryRequest() + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - assert args[0] == request_msg + client.batch_delete_versions(**mock_args) + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/repositories/*/packages/*}/versions:batchDelete" + % client.transport._host, + args[1], + ) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_packages_empty_call_grpc_asyncio(): - client = ArtifactRegistryAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", + +def test_batch_delete_versions_rest_flattened_error(transport: str = "rest"): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.list_packages), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - package.ListPackagesResponse( - next_page_token="next_page_token_value", - ) + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.batch_delete_versions( + version.BatchDeleteVersionsRequest(), + parent="parent_value", + names=["names_value"], ) - await client.list_packages(request=None) - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = package.ListPackagesRequest() - assert args[0] == request_msg +def test_update_version_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_package_empty_call_grpc_asyncio(): - client = ArtifactRegistryAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) + # Ensure method has been cached + assert client._transport.update_version in client._transport._wrapped_methods - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.get_package), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - package.Package( - name="name_value", - display_name="display_name_value", - ) + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. ) - await client.get_package(request=None) + client._transport._wrapped_methods[client._transport.update_version] = mock_rpc - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = package.GetPackageRequest() + request = {} + client.update_version(request) - assert args[0] == request_msg + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + client.update_version(request) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_package_empty_call_grpc_asyncio(): - client = ArtifactRegistryAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_version_rest_required_fields( + request_type=gda_version.UpdateVersionRequest, +): + transport_class = transports.ArtifactRegistryRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.delete_package), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) - await client.delete_package(request=None) + # verify fields with default values are dropped - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = package.DeletePackageRequest() + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_version._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - assert args[0] == request_msg + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_version._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_versions_empty_call_grpc_asyncio(): - client = ArtifactRegistryAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) + request = request_type(**request_init) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.list_versions), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - version.ListVersionsResponse( - next_page_token="next_page_token_value", - ) - ) - await client.list_versions(request=None) + # Designate an appropriate value for the returned response. + return_value = gda_version.Version() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = version.ListVersionsRequest() + response_value = Response() + response_value.status_code = 200 - assert args[0] == request_msg + # Convert return value to protobuf type + return_value = gda_version.Version.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_version_empty_call_grpc_asyncio(): - client = ArtifactRegistryAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) + response = client.update_version(request) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.get_version), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - version.Version( - name="name_value", - description="description_value", - ) - ) - await client.get_version(request=None) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = version.GetVersionRequest() - assert args[0] == request_msg +def test_update_version_rest_unset_required_fields(): + transport = transports.ArtifactRegistryRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + unset_fields = transport.update_version._get_unset_required_fields({}) + assert set(unset_fields) == (set(("updateMask",)) & set(("version",))) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-@pytest.mark.asyncio -async def test_delete_version_empty_call_grpc_asyncio(): - client = ArtifactRegistryAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", + +def test_update_version_rest_flattened(): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.delete_version), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gda_version.Version() + + # get arguments that satisfy an http rule for this method + sample_request = { + "version": { + "name": "projects/sample1/locations/sample2/repositories/sample3/packages/sample4/versions/sample5" + } + } + + # get truthy value for each flattened field + mock_args = dict( + version=gda_version.Version(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) - await client.delete_version(request=None) + mock_args.update(sample_request) - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = version.DeleteVersionRequest() + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = gda_version.Version.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - assert args[0] == request_msg + client.update_version(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{version.name=projects/*/locations/*/repositories/*/packages/*/versions/*}" + % client.transport._host, + args[1], + ) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_batch_delete_versions_empty_call_grpc_asyncio(): - client = ArtifactRegistryAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", +def test_update_version_rest_flattened_error(transport: str = "rest"): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.batch_delete_versions), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_version( + gda_version.UpdateVersionRequest(), + version=gda_version.Version(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) - await client.batch_delete_versions(request=None) - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = version.BatchDeleteVersionsRequest() - assert args[0] == request_msg +def test_list_files_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_files_empty_call_grpc_asyncio(): - client = ArtifactRegistryAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) + # Ensure method has been cached + assert client._transport.list_files in client._transport._wrapped_methods - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.list_files), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - file.ListFilesResponse( - next_page_token="next_page_token_value", - ) + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
) - await client.list_files(request=None) + client._transport._wrapped_methods[client._transport.list_files] = mock_rpc - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = file.ListFilesRequest() + request = {} + client.list_files(request) - assert args[0] == request_msg + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + client.list_files(request) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_file_empty_call_grpc_asyncio(): - client = ArtifactRegistryAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_files_rest_required_fields(request_type=file.ListFilesRequest): + transport_class = transports.ArtifactRegistryRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.get_file), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - file.File( - name="name_value", - size_bytes=1089, - owner="owner_value", - ) - ) - await client.get_file(request=None) + # verify fields with default values are dropped - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = file.GetFileRequest() + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_files._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - assert args[0] == request_msg + # verify required fields with default values are now present + jsonified_request["parent"] = "parent_value" -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_tags_empty_call_grpc_asyncio(): - client = ArtifactRegistryAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_files._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) ) + jsonified_request.update(unset_fields) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.list_tags), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - tag.ListTagsResponse( - next_page_token="next_page_token_value", - ) - ) - await client.list_tags(request=None) + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = tag.ListTagsRequest() + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) - assert args[0] == request_msg + # Designate an appropriate value for the returned response. + return_value = file.ListFilesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = file.ListFilesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-@pytest.mark.asyncio -async def test_get_tag_empty_call_grpc_asyncio(): - client = ArtifactRegistryAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_files(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_files_rest_unset_required_fields(): + transport = transports.ArtifactRegistryRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.get_tag), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - tag.Tag( - name="name_value", - version="version_value", + unset_fields = transport.list_files._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", ) ) - await client.get_tag(request=None) + & set(("parent",)) + ) - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = tag.GetTagRequest() - assert args[0] == request_msg +def test_list_files_rest_flattened(): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = file.ListFilesResponse() -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-@pytest.mark.asyncio -async def test_create_tag_empty_call_grpc_asyncio(): - client = ArtifactRegistryAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/repositories/sample3" + } - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.create_tag), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - gda_tag.Tag( - name="name_value", - version="version_value", - ) + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", ) - await client.create_tag(request=None) + mock_args.update(sample_request) - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = gda_tag.CreateTagRequest() + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = file.ListFilesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - assert args[0] == request_msg + client.list_files(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/repositories/*}/files" + % client.transport._host, + args[1], + ) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-@pytest.mark.asyncio -async def test_update_tag_empty_call_grpc_asyncio(): - client = ArtifactRegistryAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", +def test_list_files_rest_flattened_error(transport: str = "rest"): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.update_tag), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - gda_tag.Tag( - name="name_value", - version="version_value", - ) + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_files( + file.ListFilesRequest(), + parent="parent_value", ) - await client.update_tag(request=None) - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = gda_tag.UpdateTagRequest() - assert args[0] == request_msg +def test_list_files_rest_pager(transport: str = "rest"): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + file.ListFilesResponse( + files=[ + file.File(), + file.File(), + file.File(), + ], + next_page_token="abc", + ), + file.ListFilesResponse( + files=[], + next_page_token="def", + ), + file.ListFilesResponse( + files=[ + file.File(), + ], + next_page_token="ghi", + ), + file.ListFilesResponse( + files=[ + file.File(), + file.File(), + ], + ), + ) + # Two responses for two calls + response = response + response -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_tag_empty_call_grpc_asyncio(): - client = ArtifactRegistryAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) + # Wrap the values into proper Response objs + response = tuple(file.ListFilesResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.delete_tag), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_tag(request=None) + sample_request = { + "parent": "projects/sample1/locations/sample2/repositories/sample3" + } - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = tag.DeleteTagRequest() + pager = client.list_files(request=sample_request) - assert args[0] == request_msg + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, file.File) for i in results) + pages = list(client.list_files(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_set_iam_policy_empty_call_grpc_asyncio(): - client = ArtifactRegistryAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - policy_pb2.Policy( - version=774, - etag=b"etag_blob", - ) +def test_get_file_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - await client.set_iam_policy(request=None) - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = iam_policy_pb2.SetIamPolicyRequest() + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - assert args[0] == request_msg + # Ensure method has been cached + assert client._transport.get_file in client._transport._wrapped_methods + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_file] = mock_rpc -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_iam_policy_empty_call_grpc_asyncio(): - client = ArtifactRegistryAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) + request = {} + client.get_file(request) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - policy_pb2.Policy( - version=774, - etag=b"etag_blob", - ) - ) - await client.get_iam_policy(request=None) + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = iam_policy_pb2.GetIamPolicyRequest() + client.get_file(request) - assert args[0] == request_msg + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-@pytest.mark.asyncio -async def test_test_iam_permissions_empty_call_grpc_asyncio(): - client = ArtifactRegistryAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", +def test_get_file_rest_required_fields(request_type=file.GetFileRequest): + transport_class = transports.ArtifactRegistryRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_file._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_file._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = file.File() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = file.File.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_file(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_file_rest_unset_required_fields(): + transport = transports.ArtifactRegistryRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_file._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +def test_get_file_rest_flattened(): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = file.File() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/repositories/sample3/files/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = file.File.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_file(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/repositories/*/files/**}" + % client.transport._host, + args[1], + ) + + +def test_get_file_rest_flattened_error(transport: str = "rest"): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_file( + file.GetFileRequest(), + name="name_value", + ) + + +def test_delete_file_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_file in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_file] = mock_rpc + + request = {} + client.delete_file(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_file(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_file_rest_required_fields(request_type=file.DeleteFileRequest): + transport_class = transports.ArtifactRegistryRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_file._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_file._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_file(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_file_rest_unset_required_fields(): + transport = transports.ArtifactRegistryRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_file._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +def test_delete_file_rest_flattened(): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/repositories/sample3/files/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_file(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/repositories/*/files/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_file_rest_flattened_error(transport: str = "rest"): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_file( + file.DeleteFileRequest(), + name="name_value", + ) + + +def test_update_file_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_file in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_file] = mock_rpc + + request = {} + client.update_file(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.update_file(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_file_rest_required_fields(request_type=gda_file.UpdateFileRequest): + transport_class = transports.ArtifactRegistryRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_file._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_file._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = gda_file.File() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = gda_file.File.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_file(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_file_rest_unset_required_fields(): + transport = transports.ArtifactRegistryRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_file._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("updateMask",)) + & set( + ( + "file", + "updateMask", + ) + ) + ) + + +def test_update_file_rest_flattened(): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = gda_file.File() + + # get arguments that satisfy an http rule for this method + sample_request = { + "file": { + "name": "projects/sample1/locations/sample2/repositories/sample3/files/sample4" + } + } + + # get truthy value for each flattened field + mock_args = dict( + file=gda_file.File(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = gda_file.File.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_file(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{file.name=projects/*/locations/*/repositories/*/files/*}" + % client.transport._host, + args[1], + ) + + +def test_update_file_rest_flattened_error(transport: str = "rest"): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_file( + gda_file.UpdateFileRequest(), + file=gda_file.File(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_list_tags_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_tags in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_tags] = mock_rpc + + request = {} + client.list_tags(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_tags(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_tags_rest_flattened(): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = tag.ListTagsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/repositories/sample3/packages/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = tag.ListTagsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_tags(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/repositories/*/packages/*}/tags" + % client.transport._host, + args[1], + ) + + +def test_list_tags_rest_flattened_error(transport: str = "rest"): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_tags( + tag.ListTagsRequest(), + parent="parent_value", + ) + + +def test_list_tags_rest_pager(transport: str = "rest"): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + tag.ListTagsResponse( + tags=[ + tag.Tag(), + tag.Tag(), + tag.Tag(), + ], + next_page_token="abc", + ), + tag.ListTagsResponse( + tags=[], + next_page_token="def", + ), + tag.ListTagsResponse( + tags=[ + tag.Tag(), + ], + next_page_token="ghi", + ), + tag.ListTagsResponse( + tags=[ + tag.Tag(), + tag.Tag(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(tag.ListTagsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/repositories/sample3/packages/sample4" + } + + pager = client.list_tags(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, tag.Tag) for i in results) + + pages = list(client.list_tags(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_get_tag_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_tag in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + 
mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_tag] = mock_rpc + + request = {} + client.get_tag(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_tag(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_tag_rest_flattened(): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = tag.Tag() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/repositories/sample3/packages/sample4/tags/sample5" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = tag.Tag.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_tag(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/repositories/*/packages/*/tags/*}" + % client.transport._host, + args[1], + ) + + +def test_get_tag_rest_flattened_error(transport: str = "rest"): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_tag( + tag.GetTagRequest(), + name="name_value", + ) + + +def test_create_tag_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_tag in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.create_tag] = mock_rpc + + request = {} + client.create_tag(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.create_tag(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_tag_rest_flattened(): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gda_tag.Tag() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/repositories/sample3/packages/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + tag=gda_tag.Tag(name="name_value"), + tag_id="tag_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = gda_tag.Tag.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_tag(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/repositories/*/packages/*}/tags" + % client.transport._host, + args[1], + ) + + +def test_create_tag_rest_flattened_error(transport: str = "rest"): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_tag( + gda_tag.CreateTagRequest(), + parent="parent_value", + tag=gda_tag.Tag(name="name_value"), + tag_id="tag_id_value", + ) + + +def test_update_tag_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_tag in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_tag] = mock_rpc + + request = {} + client.update_tag(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.update_tag(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_tag_rest_flattened(): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = gda_tag.Tag() + + # get arguments that satisfy an http rule for this method + sample_request = { + "tag": { + "name": "projects/sample1/locations/sample2/repositories/sample3/packages/sample4/tags/sample5" + } + } + + # get truthy value for each flattened field + mock_args = dict( + tag=gda_tag.Tag(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = gda_tag.Tag.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_tag(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{tag.name=projects/*/locations/*/repositories/*/packages/*/tags/*}" + % client.transport._host, + args[1], + ) + + +def test_update_tag_rest_flattened_error(transport: str = "rest"): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_tag( + gda_tag.UpdateTagRequest(), + tag=gda_tag.Tag(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_delete_tag_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_tag in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_tag] = mock_rpc + + request = {} + client.delete_tag(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_tag(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_tag_rest_flattened(): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/repositories/sample3/packages/sample4/tags/sample5" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_tag(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/repositories/*/packages/*/tags/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_tag_rest_flattened_error(transport: str = "rest"): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_tag( + tag.DeleteTagRequest(), + name="name_value", + ) + + +def test_create_rule_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_rule in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.create_rule] = mock_rpc + + request = {} + client.create_rule(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.create_rule(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_rule_rest_required_fields(request_type=gda_rule.CreateRuleRequest): + transport_class = transports.ArtifactRegistryRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_rule._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_rule._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("rule_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = gda_rule.Rule() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = gda_rule.Rule.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_rule(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_rule_rest_unset_required_fields(): + transport = transports.ArtifactRegistryRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_rule._get_unset_required_fields({}) + assert set(unset_fields) == (set(("ruleId",)) & set(("parent",))) + + +def test_create_rule_rest_flattened(): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = gda_rule.Rule() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/repositories/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + rule=gda_rule.Rule(name="name_value"), + rule_id="rule_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = gda_rule.Rule.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_rule(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/repositories/*}/rules" + % client.transport._host, + args[1], + ) + + +def test_create_rule_rest_flattened_error(transport: str = "rest"): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_rule( + gda_rule.CreateRuleRequest(), + parent="parent_value", + rule=gda_rule.Rule(name="name_value"), + rule_id="rule_id_value", + ) + + +def test_list_rules_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_rules in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_rules] = mock_rpc + + request = {} + client.list_rules(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_rules(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_rules_rest_required_fields(request_type=rule.ListRulesRequest): + transport_class = transports.ArtifactRegistryRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_rules._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_rules._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = rule.ListRulesResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = rule.ListRulesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_rules(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_rules_rest_unset_required_fields(): + transport = transports.ArtifactRegistryRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_rules._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +def test_list_rules_rest_flattened(): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = rule.ListRulesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/repositories/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = rule.ListRulesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_rules(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/repositories/*}/rules" + % client.transport._host, + args[1], + ) + + +def test_list_rules_rest_flattened_error(transport: str = "rest"): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_rules( + rule.ListRulesRequest(), + parent="parent_value", + ) + + +def test_list_rules_rest_pager(transport: str = "rest"): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + rule.ListRulesResponse( + rules=[ + rule.Rule(), + rule.Rule(), + rule.Rule(), + ], + next_page_token="abc", + ), + rule.ListRulesResponse( + rules=[], + next_page_token="def", + ), + rule.ListRulesResponse( + rules=[ + rule.Rule(), + ], + next_page_token="ghi", + ), + rule.ListRulesResponse( + rules=[ + rule.Rule(), + rule.Rule(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(rule.ListRulesResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/repositories/sample3" + } + + pager = client.list_rules(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, rule.Rule) for i in results) + + pages = list(client.list_rules(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_get_rule_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_rule in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + 
mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_rule] = mock_rpc + + request = {} + client.get_rule(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_rule(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_rule_rest_required_fields(request_type=rule.GetRuleRequest): + transport_class = transports.ArtifactRegistryRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_rule._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_rule._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = rule.Rule() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = rule.Rule.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_rule(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_rule_rest_unset_required_fields(): + transport = transports.ArtifactRegistryRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_rule._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +def test_get_rule_rest_flattened(): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = rule.Rule() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/repositories/sample3/rules/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = rule.Rule.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_rule(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/repositories/*/rules/*}" + % client.transport._host, + args[1], + ) + + +def test_get_rule_rest_flattened_error(transport: str = "rest"): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_rule( + rule.GetRuleRequest(), + name="name_value", + ) + + +def test_update_rule_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_rule in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_rule] = mock_rpc + + request = {} + client.update_rule(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.update_rule(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_rule_rest_flattened(): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = gda_rule.Rule() + + # get arguments that satisfy an http rule for this method + sample_request = { + "rule": { + "name": "projects/sample1/locations/sample2/repositories/sample3/rules/sample4" + } + } + + # get truthy value for each flattened field + mock_args = dict( + rule=gda_rule.Rule(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = gda_rule.Rule.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_rule(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{rule.name=projects/*/locations/*/repositories/*/rules/*}" + % client.transport._host, + args[1], + ) + + +def test_update_rule_rest_flattened_error(transport: str = "rest"): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_rule( + gda_rule.UpdateRuleRequest(), + rule=gda_rule.Rule(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_delete_rule_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_rule in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_rule] = mock_rpc + + request = {} + client.delete_rule(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.delete_rule(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_rule_rest_required_fields(request_type=rule.DeleteRuleRequest): + transport_class = transports.ArtifactRegistryRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_rule._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_rule._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_rule(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_rule_rest_unset_required_fields(): + transport = transports.ArtifactRegistryRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_rule._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +def test_delete_rule_rest_flattened(): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/repositories/sample3/rules/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_rule(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/repositories/*/rules/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_rule_rest_flattened_error(transport: str = "rest"): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_rule( + rule.DeleteRuleRequest(), + name="name_value", + ) + + +def test_set_iam_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_iam_policy in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.set_iam_policy] = mock_rpc + + request = {} + client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.set_iam_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_set_iam_policy_rest_required_fields( + request_type=iam_policy_pb2.SetIamPolicyRequest, +): + transport_class = transports.ArtifactRegistryRestTransport + + request_init = {} + request_init["resource"] = "" + request = request_type(**request_init) + pb_request = request + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).set_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["resource"] = "resource_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).set_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "resource" in jsonified_request + assert jsonified_request["resource"] == "resource_value" + + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.set_iam_policy(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_set_iam_policy_rest_unset_required_fields(): + transport = transports.ArtifactRegistryRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.set_iam_policy._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "resource", + "policy", + ) + ) + ) + + +def test_get_iam_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_iam_policy in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.get_iam_policy] = mock_rpc + + request = {} + client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_iam_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_iam_policy_rest_required_fields( + request_type=iam_policy_pb2.GetIamPolicyRequest, +): + transport_class = transports.ArtifactRegistryRestTransport + + request_init = {} + request_init["resource"] = "" + request = request_type(**request_init) + pb_request = request + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["resource"] = "resource_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_iam_policy._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("options",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "resource" in jsonified_request + assert jsonified_request["resource"] == "resource_value" + + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_iam_policy(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_iam_policy_rest_unset_required_fields(): + transport = transports.ArtifactRegistryRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_iam_policy._get_unset_required_fields({}) + assert set(unset_fields) == (set(("options",)) & set(("resource",))) + + +def test_test_iam_permissions_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.test_iam_permissions in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + 
mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.test_iam_permissions + ] = mock_rpc + + request = {} + client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.test_iam_permissions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_test_iam_permissions_rest_required_fields( + request_type=iam_policy_pb2.TestIamPermissionsRequest, +): + transport_class = transports.ArtifactRegistryRestTransport + + request_init = {} + request_init["resource"] = "" + request_init["permissions"] = "" + request = request_type(**request_init) + pb_request = request + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).test_iam_permissions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["resource"] = "resource_value" + jsonified_request["permissions"] = "permissions_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).test_iam_permissions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "resource" in jsonified_request + assert jsonified_request["resource"] == "resource_value" + assert "permissions" in jsonified_request + assert jsonified_request["permissions"] == "permissions_value" + + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + 
transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = iam_policy_pb2.TestIamPermissionsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.test_iam_permissions(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_test_iam_permissions_rest_unset_required_fields(): + transport = transports.ArtifactRegistryRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.test_iam_permissions._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "resource", + "permissions", + ) + ) + ) + + +def test_get_project_settings_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ArtifactRegistryClient( + 
credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_project_settings in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_project_settings + ] = mock_rpc + + request = {} + client.get_project_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_project_settings(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_project_settings_rest_required_fields( + request_type=settings.GetProjectSettingsRequest, +): + transport_class = transports.ArtifactRegistryRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_project_settings._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_project_settings._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in 
jsonified_request + assert jsonified_request["name"] == "name_value" + + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = settings.ProjectSettings() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = settings.ProjectSettings.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_project_settings(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_project_settings_rest_unset_required_fields(): + transport = transports.ArtifactRegistryRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_project_settings._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +def test_get_project_settings_rest_flattened(): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + 
transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = settings.ProjectSettings() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/projectSettings"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = settings.ProjectSettings.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_project_settings(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/projectSettings}" % client.transport._host, args[1] + ) + + +def test_get_project_settings_rest_flattened_error(transport: str = "rest"): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_project_settings( + settings.GetProjectSettingsRequest(), + name="name_value", + ) + + +def test_update_project_settings_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_project_settings + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_project_settings + ] = mock_rpc + + request = {} + client.update_project_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.update_project_settings(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_project_settings_rest_flattened(): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = settings.ProjectSettings() + + # get arguments that satisfy an http rule for this method + sample_request = { + "project_settings": {"name": "projects/sample1/projectSettings"} + } + + # get truthy value for each flattened field + mock_args = dict( + project_settings=settings.ProjectSettings(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = settings.ProjectSettings.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_project_settings(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{project_settings.name=projects/*/projectSettings}" + % client.transport._host, + args[1], + ) + + +def test_update_project_settings_rest_flattened_error(transport: str = "rest"): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_project_settings( + settings.UpdateProjectSettingsRequest(), + project_settings=settings.ProjectSettings(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_get_vpcsc_config_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_vpcsc_config in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_vpcsc_config + ] = mock_rpc + + request = {} + client.get_vpcsc_config(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_vpcsc_config(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_vpcsc_config_rest_required_fields( + request_type=vpcsc_config.GetVPCSCConfigRequest, +): + transport_class = transports.ArtifactRegistryRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_vpcsc_config._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_vpcsc_config._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = vpcsc_config.VPCSCConfig() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = vpcsc_config.VPCSCConfig.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_vpcsc_config(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_vpcsc_config_rest_unset_required_fields(): + transport = transports.ArtifactRegistryRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_vpcsc_config._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +def test_get_vpcsc_config_rest_flattened(): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = vpcsc_config.VPCSCConfig() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/vpcscConfig"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = vpcsc_config.VPCSCConfig.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_vpcsc_config(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/vpcscConfig}" % client.transport._host, + args[1], + ) + + +def test_get_vpcsc_config_rest_flattened_error(transport: str = "rest"): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_vpcsc_config( + vpcsc_config.GetVPCSCConfigRequest(), + name="name_value", + ) + + +def test_update_vpcsc_config_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_vpcsc_config in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_vpcsc_config + ] = mock_rpc + + request = {} + client.update_vpcsc_config(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.update_vpcsc_config(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_vpcsc_config_rest_flattened(): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = gda_vpcsc_config.VPCSCConfig() + + # get arguments that satisfy an http rule for this method + sample_request = { + "vpcsc_config": {"name": "projects/sample1/locations/sample2/vpcscConfig"} + } + + # get truthy value for each flattened field + mock_args = dict( + vpcsc_config=gda_vpcsc_config.VPCSCConfig(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = gda_vpcsc_config.VPCSCConfig.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_vpcsc_config(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{vpcsc_config.name=projects/*/locations/*/vpcscConfig}" + % client.transport._host, + args[1], + ) + + +def test_update_vpcsc_config_rest_flattened_error(transport: str = "rest"): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_vpcsc_config( + gda_vpcsc_config.UpdateVPCSCConfigRequest(), + vpcsc_config=gda_vpcsc_config.VPCSCConfig(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_package_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_package in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_package] = mock_rpc + + request = {} + client.update_package(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.update_package(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_package_rest_flattened(): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = gda_package.Package() + + # get arguments that satisfy an http rule for this method + sample_request = { + "package": { + "name": "projects/sample1/locations/sample2/repositories/sample3/packages/sample4" + } + } + + # get truthy value for each flattened field + mock_args = dict( + package=gda_package.Package(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = gda_package.Package.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_package(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{package.name=projects/*/locations/*/repositories/*/packages/*}" + % client.transport._host, + args[1], + ) + + +def test_update_package_rest_flattened_error(transport: str = "rest"): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_package( + gda_package.UpdatePackageRequest(), + package=gda_package.Package(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_list_attachments_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_attachments in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_attachments + ] = mock_rpc + + request = {} + client.list_attachments(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_attachments(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_attachments_rest_required_fields( + request_type=attachment.ListAttachmentsRequest, +): + transport_class = transports.ArtifactRegistryRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_attachments._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_attachments._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = attachment.ListAttachmentsResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = attachment.ListAttachmentsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_attachments(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_attachments_rest_unset_required_fields(): + transport = transports.ArtifactRegistryRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_attachments._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +def test_list_attachments_rest_flattened(): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = attachment.ListAttachmentsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/repositories/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = attachment.ListAttachmentsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_attachments(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/repositories/*}/attachments" + % client.transport._host, + args[1], + ) + + +def test_list_attachments_rest_flattened_error(transport: str = "rest"): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_attachments( + attachment.ListAttachmentsRequest(), + parent="parent_value", + ) + + +def test_list_attachments_rest_pager(transport: str = "rest"): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + attachment.ListAttachmentsResponse( + attachments=[ + attachment.Attachment(), + attachment.Attachment(), + attachment.Attachment(), + ], + next_page_token="abc", + ), + attachment.ListAttachmentsResponse( + attachments=[], + next_page_token="def", + ), + attachment.ListAttachmentsResponse( + attachments=[ + attachment.Attachment(), + ], + next_page_token="ghi", + ), + attachment.ListAttachmentsResponse( + attachments=[ + attachment.Attachment(), + attachment.Attachment(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + attachment.ListAttachmentsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/repositories/sample3" + } + + pager = client.list_attachments(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, attachment.Attachment) for i in results) + + pages = list(client.list_attachments(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_get_attachment_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + 
wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_attachment in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_attachment] = mock_rpc + + request = {} + client.get_attachment(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_attachment(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_attachment_rest_required_fields( + request_type=attachment.GetAttachmentRequest, +): + transport_class = transports.ArtifactRegistryRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_attachment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_attachment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # 
Designate an appropriate value for the returned response. + return_value = attachment.Attachment() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = attachment.Attachment.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_attachment(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_attachment_rest_unset_required_fields(): + transport = transports.ArtifactRegistryRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_attachment._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +def test_get_attachment_rest_flattened(): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = attachment.Attachment() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/repositories/sample3/attachments/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = attachment.Attachment.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_attachment(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/repositories/*/attachments/*}" + % client.transport._host, + args[1], + ) + + +def test_get_attachment_rest_flattened_error(transport: str = "rest"): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_attachment( + attachment.GetAttachmentRequest(), + name="name_value", + ) + + +def test_create_attachment_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_attachment in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_attachment + ] = mock_rpc + + request = {} + client.create_attachment(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_attachment(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_attachment_rest_required_fields( + request_type=gda_attachment.CreateAttachmentRequest, +): + transport_class = transports.ArtifactRegistryRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["attachment_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + assert "attachmentId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_attachment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "attachmentId" in jsonified_request + assert jsonified_request["attachmentId"] == request_init["attachment_id"] + + jsonified_request["parent"] = "parent_value" + jsonified_request["attachmentId"] = "attachment_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_attachment._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("attachment_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "attachmentId" in jsonified_request + assert jsonified_request["attachmentId"] == "attachment_id_value" + + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_attachment(request) + + expected_params = [ + ( + "attachmentId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_attachment_rest_unset_required_fields(): + transport = transports.ArtifactRegistryRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_attachment._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("attachmentId",)) + & set( + ( + "parent", + "attachmentId", + "attachment", + ) + ) + ) + + +def test_create_attachment_rest_flattened(): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/repositories/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + attachment=gda_attachment.Attachment(name="name_value"), + attachment_id="attachment_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_attachment(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/repositories/*}/attachments" + % client.transport._host, + args[1], + ) + + +def test_create_attachment_rest_flattened_error(transport: str = "rest"): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_attachment( + gda_attachment.CreateAttachmentRequest(), + parent="parent_value", + attachment=gda_attachment.Attachment(name="name_value"), + attachment_id="attachment_id_value", + ) + + +def test_delete_attachment_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_attachment in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_attachment + ] = mock_rpc + + request = {} + client.delete_attachment(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_attachment(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_attachment_rest_required_fields( + request_type=attachment.DeleteAttachmentRequest, +): + transport_class = transports.ArtifactRegistryRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_attachment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_attachment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_attachment(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_attachment_rest_unset_required_fields(): + transport = transports.ArtifactRegistryRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_attachment._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +def test_delete_attachment_rest_flattened(): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/repositories/sample3/attachments/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_attachment(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/repositories/*/attachments/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_attachment_rest_flattened_error(transport: str = "rest"): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_attachment( + attachment.DeleteAttachmentRequest(), + name="name_value", + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.ArtifactRegistryGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. 
+ transport = transports.ArtifactRegistryGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ArtifactRegistryClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.ArtifactRegistryGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ArtifactRegistryClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ArtifactRegistryClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.ArtifactRegistryGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ArtifactRegistryClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.ArtifactRegistryGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = ArtifactRegistryClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.ArtifactRegistryGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.ArtifactRegistryGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ArtifactRegistryGrpcTransport, + transports.ArtifactRegistryGrpcAsyncIOTransport, + transports.ArtifactRegistryRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +def test_transport_kind_grpc(): + transport = ArtifactRegistryClient.get_transport_class("grpc")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "grpc" + + +def test_initialize_client_w_grpc(): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_docker_images_empty_call_grpc(): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_docker_images), "__call__" + ) as call: + call.return_value = artifact.ListDockerImagesResponse() + client.list_docker_images(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = artifact.ListDockerImagesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_docker_image_empty_call_grpc(): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_docker_image), "__call__") as call: + call.return_value = artifact.DockerImage() + client.get_docker_image(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = artifact.GetDockerImageRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_maven_artifacts_empty_call_grpc(): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_maven_artifacts), "__call__" + ) as call: + call.return_value = artifact.ListMavenArtifactsResponse() + client.list_maven_artifacts(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = artifact.ListMavenArtifactsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_maven_artifact_empty_call_grpc(): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_maven_artifact), "__call__" + ) as call: + call.return_value = artifact.MavenArtifact() + client.get_maven_artifact(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = artifact.GetMavenArtifactRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_npm_packages_empty_call_grpc(): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_npm_packages), "__call__" + ) as call: + call.return_value = artifact.ListNpmPackagesResponse() + client.list_npm_packages(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = artifact.ListNpmPackagesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_npm_package_empty_call_grpc(): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_npm_package), "__call__") as call: + call.return_value = artifact.NpmPackage() + client.get_npm_package(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = artifact.GetNpmPackageRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_list_python_packages_empty_call_grpc(): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_python_packages), "__call__" + ) as call: + call.return_value = artifact.ListPythonPackagesResponse() + client.list_python_packages(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = artifact.ListPythonPackagesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_python_package_empty_call_grpc(): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_python_package), "__call__" + ) as call: + call.return_value = artifact.PythonPackage() + client.get_python_package(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = artifact.GetPythonPackageRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_import_apt_artifacts_empty_call_grpc(): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.import_apt_artifacts), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.import_apt_artifacts(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = apt_artifact.ImportAptArtifactsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_import_yum_artifacts_empty_call_grpc(): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.import_yum_artifacts), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.import_yum_artifacts(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = yum_artifact.ImportYumArtifactsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_repositories_empty_call_grpc(): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_repositories), "__call__" + ) as call: + call.return_value = repository.ListRepositoriesResponse() + client.list_repositories(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = repository.ListRepositoriesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_get_repository_empty_call_grpc(): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_repository), "__call__") as call: + call.return_value = repository.Repository() + client.get_repository(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = repository.GetRepositoryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_repository_empty_call_grpc(): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_repository), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_repository(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gda_repository.CreateRepositoryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_repository_empty_call_grpc(): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_repository), "__call__" + ) as call: + call.return_value = gda_repository.Repository() + client.update_repository(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gda_repository.UpdateRepositoryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_repository_empty_call_grpc(): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_repository), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_repository(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = repository.DeleteRepositoryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_packages_empty_call_grpc(): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_packages), "__call__") as call: + call.return_value = package.ListPackagesResponse() + client.list_packages(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = package.ListPackagesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_package_empty_call_grpc(): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object(type(client.transport.get_package), "__call__") as call: + call.return_value = package.Package() + client.get_package(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = package.GetPackageRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_package_empty_call_grpc(): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete_package), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_package(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = package.DeletePackageRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_versions_empty_call_grpc(): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_versions), "__call__") as call: + call.return_value = version.ListVersionsResponse() + client.list_versions(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = version.ListVersionsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_get_version_empty_call_grpc(): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_version), "__call__") as call: + call.return_value = version.Version() + client.get_version(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = version.GetVersionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_version_empty_call_grpc(): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete_version), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_version(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = version.DeleteVersionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_batch_delete_versions_empty_call_grpc(): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.batch_delete_versions), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.batch_delete_versions(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = version.BatchDeleteVersionsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_version_empty_call_grpc(): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.update_version), "__call__") as call: + call.return_value = gda_version.Version() + client.update_version(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gda_version.UpdateVersionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_files_empty_call_grpc(): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_files), "__call__") as call: + call.return_value = file.ListFilesResponse() + client.list_files(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = file.ListFilesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_file_empty_call_grpc(): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object(type(client.transport.get_file), "__call__") as call: + call.return_value = file.File() + client.get_file(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = file.GetFileRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_file_empty_call_grpc(): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete_file), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_file(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = file.DeleteFileRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_file_empty_call_grpc(): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.update_file), "__call__") as call: + call.return_value = gda_file.File() + client.update_file(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gda_file.UpdateFileRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_list_tags_empty_call_grpc(): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_tags), "__call__") as call: + call.return_value = tag.ListTagsResponse() + client.list_tags(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = tag.ListTagsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_tag_empty_call_grpc(): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_tag), "__call__") as call: + call.return_value = tag.Tag() + client.get_tag(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = tag.GetTagRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_tag_empty_call_grpc(): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.create_tag), "__call__") as call: + call.return_value = gda_tag.Tag() + client.create_tag(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gda_tag.CreateTagRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. 
request == None and no flattened fields passed, work. +def test_update_tag_empty_call_grpc(): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.update_tag), "__call__") as call: + call.return_value = gda_tag.Tag() + client.update_tag(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gda_tag.UpdateTagRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_tag_empty_call_grpc(): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete_tag), "__call__") as call: + call.return_value = None + client.delete_tag(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = tag.DeleteTagRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_rule_empty_call_grpc(): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.create_rule), "__call__") as call: + call.return_value = gda_rule.Rule() + client.create_rule(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gda_rule.CreateRuleRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_rules_empty_call_grpc(): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_rules), "__call__") as call: + call.return_value = rule.ListRulesResponse() + client.list_rules(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = rule.ListRulesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_rule_empty_call_grpc(): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_rule), "__call__") as call: + call.return_value = rule.Rule() + client.get_rule(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = rule.GetRuleRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_rule_empty_call_grpc(): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object(type(client.transport.update_rule), "__call__") as call: + call.return_value = gda_rule.Rule() + client.update_rule(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gda_rule.UpdateRuleRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_rule_empty_call_grpc(): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete_rule), "__call__") as call: + call.return_value = None + client.delete_rule(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = rule.DeleteRuleRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_set_iam_policy_empty_call_grpc(): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + call.return_value = policy_pb2.Policy() + client.set_iam_policy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = iam_policy_pb2.SetIamPolicyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_get_iam_policy_empty_call_grpc(): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + call.return_value = policy_pb2.Policy() + client.get_iam_policy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = iam_policy_pb2.GetIamPolicyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_test_iam_permissions_empty_call_grpc(): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + client.test_iam_permissions(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = iam_policy_pb2.TestIamPermissionsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_project_settings_empty_call_grpc(): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_project_settings), "__call__" + ) as call: + call.return_value = settings.ProjectSettings() + client.get_project_settings(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = settings.GetProjectSettingsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_project_settings_empty_call_grpc(): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_project_settings), "__call__" + ) as call: + call.return_value = settings.ProjectSettings() + client.update_project_settings(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = settings.UpdateProjectSettingsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_vpcsc_config_empty_call_grpc(): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_vpcsc_config), "__call__") as call: + call.return_value = vpcsc_config.VPCSCConfig() + client.get_vpcsc_config(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = vpcsc_config.GetVPCSCConfigRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_vpcsc_config_empty_call_grpc(): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_vpcsc_config), "__call__" + ) as call: + call.return_value = gda_vpcsc_config.VPCSCConfig() + client.update_vpcsc_config(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gda_vpcsc_config.UpdateVPCSCConfigRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_package_empty_call_grpc(): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.update_package), "__call__") as call: + call.return_value = gda_package.Package() + client.update_package(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gda_package.UpdatePackageRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_attachments_empty_call_grpc(): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_attachments), "__call__") as call: + call.return_value = attachment.ListAttachmentsResponse() + client.list_attachments(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = attachment.ListAttachmentsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_get_attachment_empty_call_grpc(): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_attachment), "__call__") as call: + call.return_value = attachment.Attachment() + client.get_attachment(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = attachment.GetAttachmentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_attachment_empty_call_grpc(): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_attachment), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_attachment(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gda_attachment.CreateAttachmentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_attachment_empty_call_grpc(): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_attachment), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_attachment(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = attachment.DeleteAttachmentRequest() + + assert args[0] == request_msg + + +def test_transport_kind_grpc_asyncio(): + transport = ArtifactRegistryAsyncClient.get_transport_class("grpc_asyncio")( + credentials=async_anonymous_credentials() + ) + assert transport.kind == "grpc_asyncio" + + +def test_initialize_client_w_grpc_asyncio(): + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), transport="grpc_asyncio" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_docker_images_empty_call_grpc_asyncio(): + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_docker_images), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + artifact.ListDockerImagesResponse( + next_page_token="next_page_token_value", + ) + ) + await client.list_docker_images(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = artifact.ListDockerImagesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_docker_image_empty_call_grpc_asyncio(): + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object(type(client.transport.get_docker_image), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + artifact.DockerImage( + name="name_value", + uri="uri_value", + tags=["tags_value"], + image_size_bytes=1699, + media_type="media_type_value", + ) + ) + await client.get_docker_image(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = artifact.GetDockerImageRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_maven_artifacts_empty_call_grpc_asyncio(): + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_maven_artifacts), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + artifact.ListMavenArtifactsResponse( + next_page_token="next_page_token_value", + ) + ) + await client.list_maven_artifacts(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = artifact.ListMavenArtifactsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_maven_artifact_empty_call_grpc_asyncio(): + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_maven_artifact), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + artifact.MavenArtifact( + name="name_value", + pom_uri="pom_uri_value", + group_id="group_id_value", + artifact_id="artifact_id_value", + version="version_value", + ) + ) + await client.get_maven_artifact(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = artifact.GetMavenArtifactRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_npm_packages_empty_call_grpc_asyncio(): + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_npm_packages), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + artifact.ListNpmPackagesResponse( + next_page_token="next_page_token_value", + ) + ) + await client.list_npm_packages(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = artifact.ListNpmPackagesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_npm_package_empty_call_grpc_asyncio(): + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object(type(client.transport.get_npm_package), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + artifact.NpmPackage( + name="name_value", + package_name="package_name_value", + version="version_value", + tags=["tags_value"], + ) + ) + await client.get_npm_package(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = artifact.GetNpmPackageRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_python_packages_empty_call_grpc_asyncio(): + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_python_packages), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + artifact.ListPythonPackagesResponse( + next_page_token="next_page_token_value", + ) + ) + await client.list_python_packages(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = artifact.ListPythonPackagesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_python_package_empty_call_grpc_asyncio(): + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_python_package), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + artifact.PythonPackage( + name="name_value", + uri="uri_value", + package_name="package_name_value", + version="version_value", + ) + ) + await client.get_python_package(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = artifact.GetPythonPackageRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_import_apt_artifacts_empty_call_grpc_asyncio(): + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.import_apt_artifacts), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.import_apt_artifacts(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = apt_artifact.ImportAptArtifactsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_import_yum_artifacts_empty_call_grpc_asyncio(): + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.import_yum_artifacts), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.import_yum_artifacts(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = yum_artifact.ImportYumArtifactsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_repositories_empty_call_grpc_asyncio(): + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_repositories), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + repository.ListRepositoriesResponse( + next_page_token="next_page_token_value", + ) + ) + await client.list_repositories(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = repository.ListRepositoriesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_repository_empty_call_grpc_asyncio(): + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_repository), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + repository.Repository( + name="name_value", + format_=repository.Repository.Format.DOCKER, + description="description_value", + kms_key_name="kms_key_name_value", + mode=repository.Repository.Mode.STANDARD_REPOSITORY, + size_bytes=1089, + satisfies_pzs=True, + cleanup_policy_dry_run=True, + disallow_unspecified_mode=True, + satisfies_pzi=True, + ) + ) + await client.get_repository(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = repository.GetRepositoryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_repository_empty_call_grpc_asyncio(): + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_repository), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.create_repository(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gda_repository.CreateRepositoryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_repository_empty_call_grpc_asyncio(): + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_repository), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gda_repository.Repository( + name="name_value", + format_=gda_repository.Repository.Format.DOCKER, + description="description_value", + kms_key_name="kms_key_name_value", + mode=gda_repository.Repository.Mode.STANDARD_REPOSITORY, + size_bytes=1089, + satisfies_pzs=True, + cleanup_policy_dry_run=True, + disallow_unspecified_mode=True, + satisfies_pzi=True, + ) + ) + await client.update_repository(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gda_repository.UpdateRepositoryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_repository_empty_call_grpc_asyncio(): + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_repository), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.delete_repository(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = repository.DeleteRepositoryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_list_packages_empty_call_grpc_asyncio(): + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_packages), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + package.ListPackagesResponse( + next_page_token="next_page_token_value", + ) + ) + await client.list_packages(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = package.ListPackagesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_package_empty_call_grpc_asyncio(): + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_package), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + package.Package( + name="name_value", + display_name="display_name_value", + ) + ) + await client.get_package(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = package.GetPackageRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_delete_package_empty_call_grpc_asyncio(): + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete_package), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.delete_package(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = package.DeletePackageRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_versions_empty_call_grpc_asyncio(): + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_versions), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + version.ListVersionsResponse( + next_page_token="next_page_token_value", + ) + ) + await client.list_versions(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = version.ListVersionsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_get_version_empty_call_grpc_asyncio(): + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_version), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + version.Version( + name="name_value", + description="description_value", + ) + ) + await client.get_version(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = version.GetVersionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_version_empty_call_grpc_asyncio(): + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete_version), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.delete_version(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = version.DeleteVersionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_batch_delete_versions_empty_call_grpc_asyncio(): + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.batch_delete_versions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.batch_delete_versions(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = version.BatchDeleteVersionsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_version_empty_call_grpc_asyncio(): + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.update_version), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gda_version.Version( + name="name_value", + description="description_value", + ) + ) + await client.update_version(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gda_version.UpdateVersionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_list_files_empty_call_grpc_asyncio(): + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_files), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + file.ListFilesResponse( + next_page_token="next_page_token_value", + ) + ) + await client.list_files(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = file.ListFilesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_file_empty_call_grpc_asyncio(): + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_file), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + file.File( + name="name_value", + size_bytes=1089, + owner="owner_value", + ) + ) + await client.get_file(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = file.GetFileRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_delete_file_empty_call_grpc_asyncio(): + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete_file), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.delete_file(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = file.DeleteFileRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_file_empty_call_grpc_asyncio(): + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.update_file), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gda_file.File( + name="name_value", + size_bytes=1089, + owner="owner_value", + ) + ) + await client.update_file(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gda_file.UpdateFileRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_list_tags_empty_call_grpc_asyncio(): + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_tags), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + tag.ListTagsResponse( + next_page_token="next_page_token_value", + ) + ) + await client.list_tags(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = tag.ListTagsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_tag_empty_call_grpc_asyncio(): + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_tag), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + tag.Tag( + name="name_value", + version="version_value", + ) + ) + await client.get_tag(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = tag.GetTagRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_tag_empty_call_grpc_asyncio(): + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object(type(client.transport.create_tag), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gda_tag.Tag( + name="name_value", + version="version_value", + ) + ) + await client.create_tag(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gda_tag.CreateTagRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_tag_empty_call_grpc_asyncio(): + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.update_tag), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gda_tag.Tag( + name="name_value", + version="version_value", + ) + ) + await client.update_tag(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gda_tag.UpdateTagRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_tag_empty_call_grpc_asyncio(): + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete_tag), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_tag(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = tag.DeleteTagRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_rule_empty_call_grpc_asyncio(): + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.create_rule), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gda_rule.Rule( + name="name_value", + action=gda_rule.Rule.Action.ALLOW, + operation=gda_rule.Rule.Operation.DOWNLOAD, + package_id="package_id_value", + ) + ) + await client.create_rule(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gda_rule.CreateRuleRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_rules_empty_call_grpc_asyncio(): + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_rules), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + rule.ListRulesResponse( + next_page_token="next_page_token_value", + ) + ) + await client.list_rules(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = rule.ListRulesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_rule_empty_call_grpc_asyncio(): + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_rule), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + rule.Rule( + name="name_value", + action=rule.Rule.Action.ALLOW, + operation=rule.Rule.Operation.DOWNLOAD, + package_id="package_id_value", + ) + ) + await client.get_rule(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = rule.GetRuleRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_rule_empty_call_grpc_asyncio(): + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.update_rule), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gda_rule.Rule( + name="name_value", + action=gda_rule.Rule.Action.ALLOW, + operation=gda_rule.Rule.Operation.DOWNLOAD, + package_id="package_id_value", + ) + ) + await client.update_rule(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gda_rule.UpdateRuleRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_rule_empty_call_grpc_asyncio(): + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete_rule), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_rule(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = rule.DeleteRuleRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_set_iam_policy_empty_call_grpc_asyncio(): + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + ) + await client.set_iam_policy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = iam_policy_pb2.SetIamPolicyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_iam_policy_empty_call_grpc_asyncio(): + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + ) + await client.get_iam_policy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = iam_policy_pb2.GetIamPolicyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_test_iam_permissions_empty_call_grpc_asyncio(): + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse( + permissions=["permissions_value"], + ) + ) + await client.test_iam_permissions(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = iam_policy_pb2.TestIamPermissionsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_project_settings_empty_call_grpc_asyncio(): + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_project_settings), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + settings.ProjectSettings( + name="name_value", + legacy_redirection_state=settings.ProjectSettings.RedirectionState.REDIRECTION_FROM_GCR_IO_DISABLED, + pull_percent=1293, + ) + ) + await client.get_project_settings(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = settings.GetProjectSettingsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_project_settings_empty_call_grpc_asyncio(): + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_project_settings), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + settings.ProjectSettings( + name="name_value", + legacy_redirection_state=settings.ProjectSettings.RedirectionState.REDIRECTION_FROM_GCR_IO_DISABLED, + pull_percent=1293, + ) + ) + await client.update_project_settings(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = settings.UpdateProjectSettingsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_vpcsc_config_empty_call_grpc_asyncio(): + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_vpcsc_config), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + vpcsc_config.VPCSCConfig( + name="name_value", + vpcsc_policy=vpcsc_config.VPCSCConfig.VPCSCPolicy.DENY, + ) + ) + await client.get_vpcsc_config(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = vpcsc_config.GetVPCSCConfigRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_update_vpcsc_config_empty_call_grpc_asyncio(): + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_vpcsc_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gda_vpcsc_config.VPCSCConfig( + name="name_value", + vpcsc_policy=gda_vpcsc_config.VPCSCConfig.VPCSCPolicy.DENY, + ) + ) + await client.update_vpcsc_config(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gda_vpcsc_config.UpdateVPCSCConfigRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_package_empty_call_grpc_asyncio(): + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.update_package), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gda_package.Package( + name="name_value", + display_name="display_name_value", + ) + ) + await client.update_package(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gda_package.UpdatePackageRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_list_attachments_empty_call_grpc_asyncio(): + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_attachments), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + attachment.ListAttachmentsResponse( + next_page_token="next_page_token_value", + ) + ) + await client.list_attachments(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = attachment.ListAttachmentsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_attachment_empty_call_grpc_asyncio(): + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_attachment), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + attachment.Attachment( + name="name_value", + target="target_value", + type_="type__value", + attachment_namespace="attachment_namespace_value", + files=["files_value"], + oci_version_name="oci_version_name_value", + ) + ) + await client.get_attachment(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = attachment.GetAttachmentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_create_attachment_empty_call_grpc_asyncio(): + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_attachment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.create_attachment(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gda_attachment.CreateAttachmentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_attachment_empty_call_grpc_asyncio(): + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_attachment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.delete_attachment(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = attachment.DeleteAttachmentRequest() + + assert args[0] == request_msg + + +def test_transport_kind_rest(): + transport = ArtifactRegistryClient.get_transport_class("rest")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "rest" + + +def test_list_docker_images_rest_bad_request( + request_type=artifact.ListDockerImagesRequest, +): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/repositories/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.list_docker_images(request) + + +@pytest.mark.parametrize( + "request_type", + [ + artifact.ListDockerImagesRequest, + dict, + ], +) +def test_list_docker_images_rest_call_success(request_type): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/repositories/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = artifact.ListDockerImagesResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = artifact.ListDockerImagesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_docker_images(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDockerImagesPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_docker_images_rest_interceptors(null_interceptor): + transport = transports.ArtifactRegistryRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ArtifactRegistryRestInterceptor(), + ) + client = ArtifactRegistryClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ArtifactRegistryRestInterceptor, "post_list_docker_images" + ) as post, mock.patch.object( + transports.ArtifactRegistryRestInterceptor, "pre_list_docker_images" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = artifact.ListDockerImagesRequest.pb( + artifact.ListDockerImagesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = artifact.ListDockerImagesResponse.to_json( + artifact.ListDockerImagesResponse() + ) + req.return_value.content = return_value + + request = artifact.ListDockerImagesRequest() 
+ metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = artifact.ListDockerImagesResponse() + + client.list_docker_images( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_docker_image_rest_bad_request(request_type=artifact.GetDockerImageRequest): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/repositories/sample3/dockerImages/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.get_docker_image(request) + + +@pytest.mark.parametrize( + "request_type", + [ + artifact.GetDockerImageRequest, + dict, + ], +) +def test_get_docker_image_rest_call_success(request_type): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/repositories/sample3/dockerImages/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = artifact.DockerImage( + name="name_value", + uri="uri_value", + tags=["tags_value"], + image_size_bytes=1699, + media_type="media_type_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = artifact.DockerImage.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_docker_image(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, artifact.DockerImage) + assert response.name == "name_value" + assert response.uri == "uri_value" + assert response.tags == ["tags_value"] + assert response.image_size_bytes == 1699 + assert response.media_type == "media_type_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_docker_image_rest_interceptors(null_interceptor): + transport = transports.ArtifactRegistryRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ArtifactRegistryRestInterceptor(), + ) + client = ArtifactRegistryClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ArtifactRegistryRestInterceptor, "post_get_docker_image" + ) as post, mock.patch.object( + transports.ArtifactRegistryRestInterceptor, "pre_get_docker_image" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = artifact.GetDockerImageRequest.pb(artifact.GetDockerImageRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = 
artifact.DockerImage.to_json(artifact.DockerImage()) + req.return_value.content = return_value + + request = artifact.GetDockerImageRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = artifact.DockerImage() + + client.get_docker_image( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_maven_artifacts_rest_bad_request( + request_type=artifact.ListMavenArtifactsRequest, +): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/repositories/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.list_maven_artifacts(request) + + +@pytest.mark.parametrize( + "request_type", + [ + artifact.ListMavenArtifactsRequest, + dict, + ], +) +def test_list_maven_artifacts_rest_call_success(request_type): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/repositories/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = artifact.ListMavenArtifactsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = artifact.ListMavenArtifactsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_maven_artifacts(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListMavenArtifactsPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_maven_artifacts_rest_interceptors(null_interceptor): + transport = transports.ArtifactRegistryRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ArtifactRegistryRestInterceptor(), + ) + client = ArtifactRegistryClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ArtifactRegistryRestInterceptor, "post_list_maven_artifacts" + ) as post, mock.patch.object( + transports.ArtifactRegistryRestInterceptor, "pre_list_maven_artifacts" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = artifact.ListMavenArtifactsRequest.pb( + artifact.ListMavenArtifactsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = 
artifact.ListMavenArtifactsResponse.to_json( + artifact.ListMavenArtifactsResponse() + ) + req.return_value.content = return_value + + request = artifact.ListMavenArtifactsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = artifact.ListMavenArtifactsResponse() + + client.list_maven_artifacts( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_maven_artifact_rest_bad_request( + request_type=artifact.GetMavenArtifactRequest, +): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/repositories/sample3/mavenArtifacts/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.get_maven_artifact(request) + + +@pytest.mark.parametrize( + "request_type", + [ + artifact.GetMavenArtifactRequest, + dict, + ], +) +def test_get_maven_artifact_rest_call_success(request_type): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/repositories/sample3/mavenArtifacts/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = artifact.MavenArtifact( + name="name_value", + pom_uri="pom_uri_value", + group_id="group_id_value", + artifact_id="artifact_id_value", + version="version_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = artifact.MavenArtifact.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_maven_artifact(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, artifact.MavenArtifact) + assert response.name == "name_value" + assert response.pom_uri == "pom_uri_value" + assert response.group_id == "group_id_value" + assert response.artifact_id == "artifact_id_value" + assert response.version == "version_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_maven_artifact_rest_interceptors(null_interceptor): + transport = transports.ArtifactRegistryRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ArtifactRegistryRestInterceptor(), + ) + client = ArtifactRegistryClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ArtifactRegistryRestInterceptor, "post_get_maven_artifact" + ) as post, mock.patch.object( + transports.ArtifactRegistryRestInterceptor, "pre_get_maven_artifact" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = artifact.GetMavenArtifactRequest.pb( + artifact.GetMavenArtifactRequest() + ) + 
transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = artifact.MavenArtifact.to_json(artifact.MavenArtifact()) + req.return_value.content = return_value + + request = artifact.GetMavenArtifactRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = artifact.MavenArtifact() + + client.get_maven_artifact( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_npm_packages_rest_bad_request( + request_type=artifact.ListNpmPackagesRequest, +): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/repositories/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.list_npm_packages(request) + + +@pytest.mark.parametrize( + "request_type", + [ + artifact.ListNpmPackagesRequest, + dict, + ], +) +def test_list_npm_packages_rest_call_success(request_type): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/repositories/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = artifact.ListNpmPackagesResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = artifact.ListNpmPackagesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_npm_packages(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListNpmPackagesPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_npm_packages_rest_interceptors(null_interceptor): + transport = transports.ArtifactRegistryRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ArtifactRegistryRestInterceptor(), + ) + client = ArtifactRegistryClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ArtifactRegistryRestInterceptor, "post_list_npm_packages" + ) as post, mock.patch.object( + transports.ArtifactRegistryRestInterceptor, "pre_list_npm_packages" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = artifact.ListNpmPackagesRequest.pb( + artifact.ListNpmPackagesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = artifact.ListNpmPackagesResponse.to_json( + artifact.ListNpmPackagesResponse() + ) + req.return_value.content = return_value + + request = artifact.ListNpmPackagesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = artifact.ListNpmPackagesResponse() + + client.list_npm_packages( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_npm_package_rest_bad_request(request_type=artifact.GetNpmPackageRequest): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + 
"name": "projects/sample1/locations/sample2/repositories/sample3/npmPackages/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.get_npm_package(request) + + +@pytest.mark.parametrize( + "request_type", + [ + artifact.GetNpmPackageRequest, + dict, + ], +) +def test_get_npm_package_rest_call_success(request_type): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/repositories/sample3/npmPackages/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = artifact.NpmPackage( + name="name_value", + package_name="package_name_value", + version="version_value", + tags=["tags_value"], + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = artifact.NpmPackage.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_npm_package(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, artifact.NpmPackage) + assert response.name == "name_value" + assert response.package_name == "package_name_value" + assert response.version == "version_value" + assert response.tags == ["tags_value"] + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_npm_package_rest_interceptors(null_interceptor): + transport = transports.ArtifactRegistryRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ArtifactRegistryRestInterceptor(), + ) + client = ArtifactRegistryClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ArtifactRegistryRestInterceptor, "post_get_npm_package" + ) as post, mock.patch.object( + transports.ArtifactRegistryRestInterceptor, "pre_get_npm_package" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = artifact.GetNpmPackageRequest.pb(artifact.GetNpmPackageRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = artifact.NpmPackage.to_json(artifact.NpmPackage()) + req.return_value.content = return_value + + request = artifact.GetNpmPackageRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = artifact.NpmPackage() + + client.get_npm_package( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_python_packages_rest_bad_request( + request_type=artifact.ListPythonPackagesRequest, +): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a 
request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/repositories/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.list_python_packages(request) + + +@pytest.mark.parametrize( + "request_type", + [ + artifact.ListPythonPackagesRequest, + dict, + ], +) +def test_list_python_packages_rest_call_success(request_type): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/repositories/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = artifact.ListPythonPackagesResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = artifact.ListPythonPackagesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_python_packages(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListPythonPackagesPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_python_packages_rest_interceptors(null_interceptor): + transport = transports.ArtifactRegistryRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ArtifactRegistryRestInterceptor(), + ) + client = ArtifactRegistryClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ArtifactRegistryRestInterceptor, "post_list_python_packages" + ) as post, mock.patch.object( + transports.ArtifactRegistryRestInterceptor, "pre_list_python_packages" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = artifact.ListPythonPackagesRequest.pb( + artifact.ListPythonPackagesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = artifact.ListPythonPackagesResponse.to_json( + artifact.ListPythonPackagesResponse() + ) + req.return_value.content = return_value + + request = artifact.ListPythonPackagesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = artifact.ListPythonPackagesResponse() + + client.list_python_packages( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_python_package_rest_bad_request( + request_type=artifact.GetPythonPackageRequest, +): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will 
satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/repositories/sample3/pythonPackages/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.get_python_package(request) + + +@pytest.mark.parametrize( + "request_type", + [ + artifact.GetPythonPackageRequest, + dict, + ], +) +def test_get_python_package_rest_call_success(request_type): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/repositories/sample3/pythonPackages/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = artifact.PythonPackage( + name="name_value", + uri="uri_value", + package_name="package_name_value", + version="version_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = artifact.PythonPackage.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_python_package(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, artifact.PythonPackage) + assert response.name == "name_value" + assert response.uri == "uri_value" + assert response.package_name == "package_name_value" + assert response.version == "version_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_python_package_rest_interceptors(null_interceptor): + transport = transports.ArtifactRegistryRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ArtifactRegistryRestInterceptor(), + ) + client = ArtifactRegistryClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ArtifactRegistryRestInterceptor, "post_get_python_package" + ) as post, mock.patch.object( + transports.ArtifactRegistryRestInterceptor, "pre_get_python_package" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = artifact.GetPythonPackageRequest.pb( + artifact.GetPythonPackageRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = artifact.PythonPackage.to_json(artifact.PythonPackage()) + req.return_value.content = return_value + + request = artifact.GetPythonPackageRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = artifact.PythonPackage() + + client.get_python_package( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_import_apt_artifacts_rest_bad_request( + request_type=apt_artifact.ImportAptArtifactsRequest, +): + client = ArtifactRegistryClient( + 
credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/repositories/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.import_apt_artifacts(request) + + +@pytest.mark.parametrize( + "request_type", + [ + apt_artifact.ImportAptArtifactsRequest, + dict, + ], +) +def test_import_apt_artifacts_rest_call_success(request_type): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - iam_policy_pb2.TestIamPermissionsResponse( - permissions=["permissions_value"], - ) - ) - await client.test_iam_permissions(request=None) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/repositories/sample3"} + request = request_type(**request_init) - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = iam_policy_pb2.TestIamPermissionsRequest() + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") - assert args[0] == request_msg + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.import_apt_artifacts(request) + # Establish that the response is the type that we expect. + json_return_value = json_format.MessageToJson(return_value) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_project_settings_empty_call_grpc_asyncio(): - client = ArtifactRegistryAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_import_apt_artifacts_rest_interceptors(null_interceptor): + transport = transports.ArtifactRegistryRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ArtifactRegistryRestInterceptor(), ) + client = ArtifactRegistryClient(transport=transport) - # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.get_project_settings), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - settings.ProjectSettings( - name="name_value", - legacy_redirection_state=settings.ProjectSettings.RedirectionState.REDIRECTION_FROM_GCR_IO_DISABLED, - ) + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.ArtifactRegistryRestInterceptor, "post_import_apt_artifacts" + ) as post, mock.patch.object( + transports.ArtifactRegistryRestInterceptor, "pre_import_apt_artifacts" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apt_artifact.ImportAptArtifactsRequest.pb( + apt_artifact.ImportAptArtifactsRequest() ) - await client.get_project_settings(request=None) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = settings.GetProjectSettingsRequest() + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value - assert args[0] == request_msg + request = apt_artifact.ImportAptArtifactsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + client.import_apt_artifacts( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-@pytest.mark.asyncio -async def test_update_project_settings_empty_call_grpc_asyncio(): - client = ArtifactRegistryAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) + pre.assert_called_once() + post.assert_called_once() - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_project_settings), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - settings.ProjectSettings( - name="name_value", - legacy_redirection_state=settings.ProjectSettings.RedirectionState.REDIRECTION_FROM_GCR_IO_DISABLED, - ) - ) - await client.update_project_settings(request=None) - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = settings.UpdateProjectSettingsRequest() +def test_import_yum_artifacts_rest_bad_request( + request_type=yum_artifact.ImportYumArtifactsRequest, +): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/repositories/sample3"} + request = request_type(**request_init) - assert args[0] == request_msg + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.import_yum_artifacts(request) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-@pytest.mark.asyncio -async def test_get_vpcsc_config_empty_call_grpc_asyncio(): - client = ArtifactRegistryAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", +@pytest.mark.parametrize( + "request_type", + [ + yum_artifact.ImportYumArtifactsRequest, + dict, + ], +) +def test_import_yum_artifacts_rest_call_success(request_type): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.get_vpcsc_config), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - vpcsc_config.VPCSCConfig( - name="name_value", - vpcsc_policy=vpcsc_config.VPCSCConfig.VPCSCPolicy.DENY, - ) - ) - await client.get_vpcsc_config(request=None) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/repositories/sample3"} + request = request_type(**request_init) - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = vpcsc_config.GetVPCSCConfigRequest() + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") - assert args[0] == request_msg + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.import_yum_artifacts(request) + # Establish that the response is the type that we expect. 
+ json_return_value = json_format.MessageToJson(return_value) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_update_vpcsc_config_empty_call_grpc_asyncio(): - client = ArtifactRegistryAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_import_yum_artifacts_rest_interceptors(null_interceptor): + transport = transports.ArtifactRegistryRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ArtifactRegistryRestInterceptor(), ) + client = ArtifactRegistryClient(transport=transport) - # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.update_vpcsc_config), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - gda_vpcsc_config.VPCSCConfig( - name="name_value", - vpcsc_policy=gda_vpcsc_config.VPCSCConfig.VPCSCPolicy.DENY, - ) + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.ArtifactRegistryRestInterceptor, "post_import_yum_artifacts" + ) as post, mock.patch.object( + transports.ArtifactRegistryRestInterceptor, "pre_import_yum_artifacts" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = yum_artifact.ImportYumArtifactsRequest.pb( + yum_artifact.ImportYumArtifactsRequest() ) - await client.update_vpcsc_config(request=None) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = gda_vpcsc_config.UpdateVPCSCConfigRequest() + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value - assert args[0] == request_msg + request = yum_artifact.ImportYumArtifactsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + client.import_yum_artifacts( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) -def test_transport_kind_rest(): - transport = ArtifactRegistryClient.get_transport_class("rest")( - credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "rest" + pre.assert_called_once() + post.assert_called_once() -def test_list_docker_images_rest_bad_request( - request_type=artifact.ListDockerImagesRequest, +def test_list_repositories_rest_bad_request( + request_type=repository.ListRepositoriesRequest, ): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2/repositories/sample3"} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -22664,29 +31596,29 @@ def test_list_docker_images_rest_bad_request( response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value - client.list_docker_images(request) + client.list_repositories(request) @pytest.mark.parametrize( "request_type", [ - artifact.ListDockerImagesRequest, + repository.ListRepositoriesRequest, dict, ], ) -def test_list_docker_images_rest_call_success(request_type): +def test_list_repositories_rest_call_success(request_type): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2/repositories/sample3"} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = artifact.ListDockerImagesResponse( + return_value = repository.ListRepositoriesResponse( next_page_token="next_page_token_value", ) @@ -22695,19 +31627,19 @@ def test_list_docker_images_rest_call_success(request_type): response_value.status_code = 200 # Convert return value to protobuf type - return_value = artifact.ListDockerImagesResponse.pb(return_value) + return_value = repository.ListRepositoriesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_docker_images(request) + response = client.list_repositories(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListDockerImagesPager) + assert isinstance(response, pagers.ListRepositoriesPager) assert response.next_page_token == "next_page_token_value" @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_docker_images_rest_interceptors(null_interceptor): +def test_list_repositories_rest_interceptors(null_interceptor): transport = transports.ArtifactRegistryRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -22721,14 +31653,14 @@ def test_list_docker_images_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ArtifactRegistryRestInterceptor, "post_list_docker_images" + transports.ArtifactRegistryRestInterceptor, "post_list_repositories" ) as post, mock.patch.object( - transports.ArtifactRegistryRestInterceptor, "pre_list_docker_images" + transports.ArtifactRegistryRestInterceptor, "pre_list_repositories" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = artifact.ListDockerImagesRequest.pb( - artifact.ListDockerImagesRequest() + pb_message = repository.ListRepositoriesRequest.pb( + repository.ListRepositoriesRequest() ) transcode.return_value = { "method": "post", @@ -22739,20 +31671,20 @@ def test_list_docker_images_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 - return_value = artifact.ListDockerImagesResponse.to_json( - artifact.ListDockerImagesResponse() + return_value = repository.ListRepositoriesResponse.to_json( + repository.ListRepositoriesResponse() ) req.return_value.content = return_value - request = artifact.ListDockerImagesRequest() + request = repository.ListRepositoriesRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = artifact.ListDockerImagesResponse() + post.return_value = repository.ListRepositoriesResponse() - 
client.list_docker_images( + client.list_repositories( request, metadata=[ ("key", "val"), @@ -22764,14 +31696,12 @@ def test_list_docker_images_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_docker_image_rest_bad_request(request_type=artifact.GetDockerImageRequest): +def test_get_repository_rest_bad_request(request_type=repository.GetRepositoryRequest): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/repositories/sample3/dockerImages/sample4" - } + request_init = {"name": "projects/sample1/locations/sample2/repositories/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -22785,36 +31715,39 @@ def test_get_docker_image_rest_bad_request(request_type=artifact.GetDockerImageR response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value - client.get_docker_image(request) + client.get_repository(request) @pytest.mark.parametrize( "request_type", [ - artifact.GetDockerImageRequest, + repository.GetRepositoryRequest, dict, ], ) -def test_get_docker_image_rest_call_success(request_type): +def test_get_repository_rest_call_success(request_type): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/repositories/sample3/dockerImages/sample4" - } + request_init = {"name": "projects/sample1/locations/sample2/repositories/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = artifact.DockerImage( + return_value = repository.Repository( name="name_value", - uri="uri_value", - tags=["tags_value"], - image_size_bytes=1699, - media_type="media_type_value", + format_=repository.Repository.Format.DOCKER, + description="description_value", + kms_key_name="kms_key_name_value", + mode=repository.Repository.Mode.STANDARD_REPOSITORY, + size_bytes=1089, + satisfies_pzs=True, + cleanup_policy_dry_run=True, + disallow_unspecified_mode=True, + satisfies_pzi=True, ) # Wrap the value into a proper Response obj @@ -22822,23 +31755,28 @@ def test_get_docker_image_rest_call_success(request_type): response_value.status_code = 200 # Convert return value to protobuf type - return_value = artifact.DockerImage.pb(return_value) + return_value = repository.Repository.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_docker_image(request) + response = client.get_repository(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, artifact.DockerImage) + assert isinstance(response, repository.Repository) assert response.name == "name_value" - assert response.uri == "uri_value" - assert response.tags == ["tags_value"] - assert response.image_size_bytes == 1699 - assert response.media_type == "media_type_value" + assert response.format_ == repository.Repository.Format.DOCKER + assert response.description == "description_value" + assert response.kms_key_name == "kms_key_name_value" + assert response.mode == repository.Repository.Mode.STANDARD_REPOSITORY + assert response.size_bytes == 1089 + assert response.satisfies_pzs is True + assert response.cleanup_policy_dry_run is True + assert response.disallow_unspecified_mode is True + assert response.satisfies_pzi is True @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_docker_image_rest_interceptors(null_interceptor): +def test_get_repository_rest_interceptors(null_interceptor): transport = transports.ArtifactRegistryRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -22852,13 +31790,15 @@ def test_get_docker_image_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ArtifactRegistryRestInterceptor, "post_get_docker_image" + transports.ArtifactRegistryRestInterceptor, "post_get_repository" ) as post, mock.patch.object( - transports.ArtifactRegistryRestInterceptor, "pre_get_docker_image" + transports.ArtifactRegistryRestInterceptor, "pre_get_repository" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = artifact.GetDockerImageRequest.pb(artifact.GetDockerImageRequest()) + pb_message = repository.GetRepositoryRequest.pb( + repository.GetRepositoryRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -22868,18 +31808,18 @@ def test_get_docker_image_rest_interceptors(null_interceptor): req.return_value = mock.Mock() 
req.return_value.status_code = 200 - return_value = artifact.DockerImage.to_json(artifact.DockerImage()) + return_value = repository.Repository.to_json(repository.Repository()) req.return_value.content = return_value - request = artifact.GetDockerImageRequest() + request = repository.GetRepositoryRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = artifact.DockerImage() + post.return_value = repository.Repository() - client.get_docker_image( + client.get_repository( request, metadata=[ ("key", "val"), @@ -22891,14 +31831,14 @@ def test_get_docker_image_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_maven_artifacts_rest_bad_request( - request_type=artifact.ListMavenArtifactsRequest, +def test_create_repository_rest_bad_request( + request_type=gda_repository.CreateRepositoryRequest, ): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2/repositories/sample3"} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -22912,50 +31852,181 @@ def test_list_maven_artifacts_rest_bad_request( response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value - client.list_maven_artifacts(request) + client.create_repository(request) @pytest.mark.parametrize( "request_type", [ - artifact.ListMavenArtifactsRequest, + gda_repository.CreateRepositoryRequest, dict, ], ) -def test_list_maven_artifacts_rest_call_success(request_type): +def test_create_repository_rest_call_success(request_type): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2/repositories/sample3"} + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["repository"] = { + "maven_config": {"allow_snapshot_overwrites": True, "version_policy": 1}, + "docker_config": {"immutable_tags": True}, + "virtual_repository_config": { + "upstream_policies": [ + {"id": "id_value", "repository": "repository_value", "priority": 898} + ] + }, + "remote_repository_config": { + "docker_repository": { + "public_repository": 1, + "custom_repository": {"uri": "uri_value"}, + }, + "maven_repository": { + "public_repository": 1, + "custom_repository": {"uri": "uri_value"}, + }, + "npm_repository": { + "public_repository": 1, + "custom_repository": {"uri": "uri_value"}, + }, + "python_repository": { + "public_repository": 1, + "custom_repository": {"uri": "uri_value"}, + }, + "apt_repository": { + "public_repository": { + "repository_base": 1, + "repository_path": "repository_path_value", + }, + "custom_repository": {"uri": "uri_value"}, + }, + "yum_repository": { + "public_repository": { + "repository_base": 1, + "repository_path": "repository_path_value", + }, + "custom_repository": {"uri": "uri_value"}, + }, + "common_repository": {"uri": "uri_value"}, + "description": "description_value", + "upstream_credentials": { + 
"username_password_credentials": { + "username": "username_value", + "password_secret_version": "password_secret_version_value", + } + }, + "disable_upstream_validation": True, + }, + "name": "name_value", + "format_": 1, + "description": "description_value", + "labels": {}, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "kms_key_name": "kms_key_name_value", + "mode": 1, + "cleanup_policies": {}, + "size_bytes": 1089, + "satisfies_pzs": True, + "cleanup_policy_dry_run": True, + "vulnerability_scanning_config": { + "enablement_config": 1, + "last_enable_time": {}, + "enablement_state": 1, + "enablement_state_reason": "enablement_state_reason_value", + }, + "disallow_unspecified_mode": True, + "satisfies_pzi": True, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = gda_repository.CreateRepositoryRequest.meta.fields["repository"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["repository"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["repository"][field])): + del request_init["repository"][field][i][subfield] + else: + del 
request_init["repository"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = artifact.ListMavenArtifactsResponse( - next_page_token="next_page_token_value", - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = artifact.ListMavenArtifactsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_maven_artifacts(request) + response = client.create_repository(request) # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListMavenArtifactsPager) - assert response.next_page_token == "next_page_token_value" + json_return_value = json_format.MessageToJson(return_value) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_maven_artifacts_rest_interceptors(null_interceptor): +def test_create_repository_rest_interceptors(null_interceptor): transport = transports.ArtifactRegistryRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -22969,14 +32040,16 @@ def test_list_maven_artifacts_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ArtifactRegistryRestInterceptor, "post_list_maven_artifacts" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.ArtifactRegistryRestInterceptor, "post_create_repository" ) as post, mock.patch.object( - transports.ArtifactRegistryRestInterceptor, "pre_list_maven_artifacts" + 
transports.ArtifactRegistryRestInterceptor, "pre_create_repository" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = artifact.ListMavenArtifactsRequest.pb( - artifact.ListMavenArtifactsRequest() + pb_message = gda_repository.CreateRepositoryRequest.pb( + gda_repository.CreateRepositoryRequest() ) transcode.return_value = { "method": "post", @@ -22987,20 +32060,18 @@ def test_list_maven_artifacts_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 - return_value = artifact.ListMavenArtifactsResponse.to_json( - artifact.ListMavenArtifactsResponse() - ) + return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.content = return_value - request = artifact.ListMavenArtifactsRequest() + request = gda_repository.CreateRepositoryRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = artifact.ListMavenArtifactsResponse() + post.return_value = operations_pb2.Operation() - client.list_maven_artifacts( + client.create_repository( request, metadata=[ ("key", "val"), @@ -23012,15 +32083,17 @@ def test_list_maven_artifacts_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_maven_artifact_rest_bad_request( - request_type=artifact.GetMavenArtifactRequest, +def test_update_repository_rest_bad_request( + request_type=gda_repository.UpdateRepositoryRequest, ): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/repositories/sample3/mavenArtifacts/sample4" + "repository": { + "name": "projects/sample1/locations/sample2/repositories/sample3" + } } request = request_type(**request_init) @@ -23035,36 +32108,180 @@ def test_get_maven_artifact_rest_bad_request( response_value.status_code = 400 response_value.request = mock.Mock() 
req.return_value = response_value - client.get_maven_artifact(request) + client.update_repository(request) @pytest.mark.parametrize( "request_type", [ - artifact.GetMavenArtifactRequest, + gda_repository.UpdateRepositoryRequest, dict, ], ) -def test_get_maven_artifact_rest_call_success(request_type): +def test_update_repository_rest_call_success(request_type): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/repositories/sample3/mavenArtifacts/sample4" + "repository": { + "name": "projects/sample1/locations/sample2/repositories/sample3" + } + } + request_init["repository"] = { + "maven_config": {"allow_snapshot_overwrites": True, "version_policy": 1}, + "docker_config": {"immutable_tags": True}, + "virtual_repository_config": { + "upstream_policies": [ + {"id": "id_value", "repository": "repository_value", "priority": 898} + ] + }, + "remote_repository_config": { + "docker_repository": { + "public_repository": 1, + "custom_repository": {"uri": "uri_value"}, + }, + "maven_repository": { + "public_repository": 1, + "custom_repository": {"uri": "uri_value"}, + }, + "npm_repository": { + "public_repository": 1, + "custom_repository": {"uri": "uri_value"}, + }, + "python_repository": { + "public_repository": 1, + "custom_repository": {"uri": "uri_value"}, + }, + "apt_repository": { + "public_repository": { + "repository_base": 1, + "repository_path": "repository_path_value", + }, + "custom_repository": {"uri": "uri_value"}, + }, + "yum_repository": { + "public_repository": { + "repository_base": 1, + "repository_path": "repository_path_value", + }, + "custom_repository": {"uri": "uri_value"}, + }, + "common_repository": {"uri": "uri_value"}, + "description": "description_value", + "upstream_credentials": { + "username_password_credentials": { + "username": "username_value", + "password_secret_version": 
"password_secret_version_value", + } + }, + "disable_upstream_validation": True, + }, + "name": "projects/sample1/locations/sample2/repositories/sample3", + "format_": 1, + "description": "description_value", + "labels": {}, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "kms_key_name": "kms_key_name_value", + "mode": 1, + "cleanup_policies": {}, + "size_bytes": 1089, + "satisfies_pzs": True, + "cleanup_policy_dry_run": True, + "vulnerability_scanning_config": { + "enablement_config": 1, + "last_enable_time": {}, + "enablement_state": 1, + "enablement_state_reason": "enablement_state_reason_value", + }, + "disallow_unspecified_mode": True, + "satisfies_pzi": True, } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = gda_repository.UpdateRepositoryRequest.meta.fields["repository"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["repository"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["repository"][field])): + del request_init["repository"][field][i][subfield] + else: + del 
request_init["repository"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = artifact.MavenArtifact( + return_value = gda_repository.Repository( name="name_value", - pom_uri="pom_uri_value", - group_id="group_id_value", - artifact_id="artifact_id_value", - version="version_value", + format_=gda_repository.Repository.Format.DOCKER, + description="description_value", + kms_key_name="kms_key_name_value", + mode=gda_repository.Repository.Mode.STANDARD_REPOSITORY, + size_bytes=1089, + satisfies_pzs=True, + cleanup_policy_dry_run=True, + disallow_unspecified_mode=True, + satisfies_pzi=True, ) # Wrap the value into a proper Response obj @@ -23072,23 +32289,28 @@ def test_get_maven_artifact_rest_call_success(request_type): response_value.status_code = 200 # Convert return value to protobuf type - return_value = artifact.MavenArtifact.pb(return_value) + return_value = gda_repository.Repository.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_maven_artifact(request) + response = client.update_repository(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, artifact.MavenArtifact) + assert isinstance(response, gda_repository.Repository) assert response.name == "name_value" - assert response.pom_uri == "pom_uri_value" - assert response.group_id == "group_id_value" - assert response.artifact_id == "artifact_id_value" - assert response.version == "version_value" + assert response.format_ == gda_repository.Repository.Format.DOCKER + assert response.description == "description_value" + assert response.kms_key_name == "kms_key_name_value" + assert response.mode == gda_repository.Repository.Mode.STANDARD_REPOSITORY + assert response.size_bytes == 1089 + assert response.satisfies_pzs is True + assert response.cleanup_policy_dry_run is True + assert response.disallow_unspecified_mode is True + assert response.satisfies_pzi is True @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_maven_artifact_rest_interceptors(null_interceptor): +def test_update_repository_rest_interceptors(null_interceptor): transport = transports.ArtifactRegistryRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -23102,14 +32324,14 @@ def test_get_maven_artifact_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ArtifactRegistryRestInterceptor, "post_get_maven_artifact" + transports.ArtifactRegistryRestInterceptor, "post_update_repository" ) as post, mock.patch.object( - transports.ArtifactRegistryRestInterceptor, "pre_get_maven_artifact" + transports.ArtifactRegistryRestInterceptor, "pre_update_repository" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = artifact.GetMavenArtifactRequest.pb( - artifact.GetMavenArtifactRequest() + pb_message = gda_repository.UpdateRepositoryRequest.pb( + gda_repository.UpdateRepositoryRequest() ) transcode.return_value = { "method": "post", @@ -23120,18 +32342,18 @@ def 
test_get_maven_artifact_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 - return_value = artifact.MavenArtifact.to_json(artifact.MavenArtifact()) + return_value = gda_repository.Repository.to_json(gda_repository.Repository()) req.return_value.content = return_value - request = artifact.GetMavenArtifactRequest() + request = gda_repository.UpdateRepositoryRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = artifact.MavenArtifact() + post.return_value = gda_repository.Repository() - client.get_maven_artifact( + client.update_repository( request, metadata=[ ("key", "val"), @@ -23143,14 +32365,14 @@ def test_get_maven_artifact_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_npm_packages_rest_bad_request( - request_type=artifact.ListNpmPackagesRequest, +def test_delete_repository_rest_bad_request( + request_type=repository.DeleteRepositoryRequest, ): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2/repositories/sample3"} + request_init = {"name": "projects/sample1/locations/sample2/repositories/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -23164,50 +32386,44 @@ def test_list_npm_packages_rest_bad_request( response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value - client.list_npm_packages(request) + client.delete_repository(request) @pytest.mark.parametrize( "request_type", [ - artifact.ListNpmPackagesRequest, + repository.DeleteRepositoryRequest, dict, ], ) -def test_list_npm_packages_rest_call_success(request_type): +def test_delete_repository_rest_call_success(request_type): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2/repositories/sample3"} + request_init = {"name": "projects/sample1/locations/sample2/repositories/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = artifact.ListNpmPackagesResponse( - next_page_token="next_page_token_value", - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = artifact.ListNpmPackagesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_npm_packages(request) + response = client.delete_repository(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListNpmPackagesPager) - assert response.next_page_token == "next_page_token_value" + json_return_value = json_format.MessageToJson(return_value) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_npm_packages_rest_interceptors(null_interceptor): +def test_delete_repository_rest_interceptors(null_interceptor): transport = transports.ArtifactRegistryRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -23221,14 +32437,16 @@ def test_list_npm_packages_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ArtifactRegistryRestInterceptor, "post_list_npm_packages" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.ArtifactRegistryRestInterceptor, "post_delete_repository" ) as post, mock.patch.object( - transports.ArtifactRegistryRestInterceptor, "pre_list_npm_packages" + transports.ArtifactRegistryRestInterceptor, "pre_delete_repository" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = artifact.ListNpmPackagesRequest.pb( - artifact.ListNpmPackagesRequest() + pb_message = repository.DeleteRepositoryRequest.pb( + repository.DeleteRepositoryRequest() ) transcode.return_value = { "method": "post", @@ -23239,20 +32457,18 @@ def test_list_npm_packages_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 - return_value = artifact.ListNpmPackagesResponse.to_json( - artifact.ListNpmPackagesResponse() - ) + return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.content = return_value - request = artifact.ListNpmPackagesRequest() + request = repository.DeleteRepositoryRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = artifact.ListNpmPackagesResponse() + post.return_value = 
operations_pb2.Operation() - client.list_npm_packages( + client.delete_repository( request, metadata=[ ("key", "val"), @@ -23264,14 +32480,12 @@ def test_list_npm_packages_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_npm_package_rest_bad_request(request_type=artifact.GetNpmPackageRequest): +def test_list_packages_rest_bad_request(request_type=package.ListPackagesRequest): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/repositories/sample3/npmPackages/sample4" - } + request_init = {"parent": "projects/sample1/locations/sample2/repositories/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -23285,35 +32499,30 @@ def test_get_npm_package_rest_bad_request(request_type=artifact.GetNpmPackageReq response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value - client.get_npm_package(request) + client.list_packages(request) @pytest.mark.parametrize( "request_type", [ - artifact.GetNpmPackageRequest, + package.ListPackagesRequest, dict, ], ) -def test_get_npm_package_rest_call_success(request_type): +def test_list_packages_rest_call_success(request_type): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/repositories/sample3/npmPackages/sample4" - } + request_init = {"parent": "projects/sample1/locations/sample2/repositories/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = artifact.NpmPackage( - name="name_value", - package_name="package_name_value", - version="version_value", - tags=["tags_value"], + return_value = package.ListPackagesResponse( + next_page_token="next_page_token_value", ) # Wrap the value into a proper Response obj @@ -23321,22 +32530,19 @@ def test_get_npm_package_rest_call_success(request_type): response_value.status_code = 200 # Convert return value to protobuf type - return_value = artifact.NpmPackage.pb(return_value) + return_value = package.ListPackagesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_npm_package(request) + response = client.list_packages(request) # Establish that the response is the type that we expect. - assert isinstance(response, artifact.NpmPackage) - assert response.name == "name_value" - assert response.package_name == "package_name_value" - assert response.version == "version_value" - assert response.tags == ["tags_value"] + assert isinstance(response, pagers.ListPackagesPager) + assert response.next_page_token == "next_page_token_value" @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_npm_package_rest_interceptors(null_interceptor): +def test_list_packages_rest_interceptors(null_interceptor): transport = transports.ArtifactRegistryRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -23350,13 +32556,13 @@ def test_get_npm_package_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ArtifactRegistryRestInterceptor, "post_get_npm_package" + transports.ArtifactRegistryRestInterceptor, "post_list_packages" ) as post, mock.patch.object( - transports.ArtifactRegistryRestInterceptor, "pre_get_npm_package" + transports.ArtifactRegistryRestInterceptor, "pre_list_packages" ) as pre: 
pre.assert_not_called() post.assert_not_called() - pb_message = artifact.GetNpmPackageRequest.pb(artifact.GetNpmPackageRequest()) + pb_message = package.ListPackagesRequest.pb(package.ListPackagesRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -23366,18 +32572,20 @@ def test_get_npm_package_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 - return_value = artifact.NpmPackage.to_json(artifact.NpmPackage()) + return_value = package.ListPackagesResponse.to_json( + package.ListPackagesResponse() + ) req.return_value.content = return_value - request = artifact.GetNpmPackageRequest() + request = package.ListPackagesRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = artifact.NpmPackage() + post.return_value = package.ListPackagesResponse() - client.get_npm_package( + client.list_packages( request, metadata=[ ("key", "val"), @@ -23389,14 +32597,14 @@ def test_get_npm_package_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_python_packages_rest_bad_request( - request_type=artifact.ListPythonPackagesRequest, -): +def test_get_package_rest_bad_request(request_type=package.GetPackageRequest): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2/repositories/sample3"} + request_init = { + "name": "projects/sample1/locations/sample2/repositories/sample3/packages/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -23410,30 +32618,33 @@ def test_list_python_packages_rest_bad_request( response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value - client.list_python_packages(request) + client.get_package(request) @pytest.mark.parametrize( "request_type", [ - artifact.ListPythonPackagesRequest, + package.GetPackageRequest, dict, ], ) -def test_list_python_packages_rest_call_success(request_type): +def test_get_package_rest_call_success(request_type): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2/repositories/sample3"} + request_init = { + "name": "projects/sample1/locations/sample2/repositories/sample3/packages/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = artifact.ListPythonPackagesResponse( - next_page_token="next_page_token_value", + return_value = package.Package( + name="name_value", + display_name="display_name_value", ) # Wrap the value into a proper Response obj @@ -23441,19 +32652,20 @@ def test_list_python_packages_rest_call_success(request_type): response_value.status_code = 200 # Convert return value to protobuf type - return_value = artifact.ListPythonPackagesResponse.pb(return_value) + return_value = package.Package.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_python_packages(request) + response = client.get_package(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListPythonPackagesPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, package.Package) + assert response.name == "name_value" + assert response.display_name == "display_name_value" @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_python_packages_rest_interceptors(null_interceptor): +def test_get_package_rest_interceptors(null_interceptor): transport = transports.ArtifactRegistryRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -23467,15 +32679,13 @@ def test_list_python_packages_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ArtifactRegistryRestInterceptor, "post_list_python_packages" + transports.ArtifactRegistryRestInterceptor, "post_get_package" ) as post, mock.patch.object( - transports.ArtifactRegistryRestInterceptor, "pre_list_python_packages" + transports.ArtifactRegistryRestInterceptor, "pre_get_package" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = artifact.ListPythonPackagesRequest.pb( - artifact.ListPythonPackagesRequest() - ) + pb_message = package.GetPackageRequest.pb(package.GetPackageRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -23485,20 +32695,18 @@ def test_list_python_packages_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 - return_value = artifact.ListPythonPackagesResponse.to_json( - artifact.ListPythonPackagesResponse() - ) + return_value = package.Package.to_json(package.Package()) req.return_value.content = return_value - request = artifact.ListPythonPackagesRequest() + request = package.GetPackageRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = artifact.ListPythonPackagesResponse() + post.return_value = 
package.Package() - client.list_python_packages( + client.get_package( request, metadata=[ ("key", "val"), @@ -23510,15 +32718,13 @@ def test_list_python_packages_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_python_package_rest_bad_request( - request_type=artifact.GetPythonPackageRequest, -): +def test_delete_package_rest_bad_request(request_type=package.DeletePackageRequest): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/repositories/sample3/pythonPackages/sample4" + "name": "projects/sample1/locations/sample2/repositories/sample3/packages/sample4" } request = request_type(**request_init) @@ -23533,58 +32739,46 @@ def test_get_python_package_rest_bad_request( response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value - client.get_python_package(request) + client.delete_package(request) @pytest.mark.parametrize( "request_type", [ - artifact.GetPythonPackageRequest, + package.DeletePackageRequest, dict, ], ) -def test_get_python_package_rest_call_success(request_type): +def test_delete_package_rest_call_success(request_type): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/repositories/sample3/pythonPackages/sample4" + "name": "projects/sample1/locations/sample2/repositories/sample3/packages/sample4" } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = artifact.PythonPackage( - name="name_value", - uri="uri_value", - package_name="package_name_value", - version="version_value", - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = artifact.PythonPackage.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_python_package(request) + response = client.delete_package(request) # Establish that the response is the type that we expect. - assert isinstance(response, artifact.PythonPackage) - assert response.name == "name_value" - assert response.uri == "uri_value" - assert response.package_name == "package_name_value" - assert response.version == "version_value" + json_return_value = json_format.MessageToJson(return_value) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_python_package_rest_interceptors(null_interceptor): +def test_delete_package_rest_interceptors(null_interceptor): transport = transports.ArtifactRegistryRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -23598,15 +32792,15 @@ def test_get_python_package_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ArtifactRegistryRestInterceptor, "post_get_python_package" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.ArtifactRegistryRestInterceptor, "post_delete_package" ) as post, mock.patch.object( - transports.ArtifactRegistryRestInterceptor, "pre_get_python_package" + transports.ArtifactRegistryRestInterceptor, "pre_delete_package" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = 
artifact.GetPythonPackageRequest.pb( - artifact.GetPythonPackageRequest() - ) + pb_message = package.DeletePackageRequest.pb(package.DeletePackageRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -23616,18 +32810,18 @@ def test_get_python_package_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 - return_value = artifact.PythonPackage.to_json(artifact.PythonPackage()) + return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.content = return_value - request = artifact.GetPythonPackageRequest() + request = package.DeletePackageRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = artifact.PythonPackage() + post.return_value = operations_pb2.Operation() - client.get_python_package( + client.delete_package( request, metadata=[ ("key", "val"), @@ -23639,14 +32833,14 @@ def test_get_python_package_rest_interceptors(null_interceptor): post.assert_called_once() -def test_import_apt_artifacts_rest_bad_request( - request_type=apt_artifact.ImportAptArtifactsRequest, -): +def test_list_versions_rest_bad_request(request_type=version.ListVersionsRequest): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2/repositories/sample3"} + request_init = { + "parent": "projects/sample1/locations/sample2/repositories/sample3/packages/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -23660,44 +32854,52 @@ def test_import_apt_artifacts_rest_bad_request( response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value - client.import_apt_artifacts(request) + client.list_versions(request) @pytest.mark.parametrize( "request_type", [ - apt_artifact.ImportAptArtifactsRequest, + version.ListVersionsRequest, dict, ], ) -def test_import_apt_artifacts_rest_call_success(request_type): +def test_list_versions_rest_call_success(request_type): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2/repositories/sample3"} + request_init = { + "parent": "projects/sample1/locations/sample2/repositories/sample3/packages/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = version.ListVersionsResponse( + next_page_token="next_page_token_value", + ) # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = version.ListVersionsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.import_apt_artifacts(request) + response = client.list_versions(request) # Establish that the response is the type that we expect. 
- json_return_value = json_format.MessageToJson(return_value) + assert isinstance(response, pagers.ListVersionsPager) + assert response.next_page_token == "next_page_token_value" @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_import_apt_artifacts_rest_interceptors(null_interceptor): +def test_list_versions_rest_interceptors(null_interceptor): transport = transports.ArtifactRegistryRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -23711,17 +32913,13 @@ def test_import_apt_artifacts_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.ArtifactRegistryRestInterceptor, "post_import_apt_artifacts" + transports.ArtifactRegistryRestInterceptor, "post_list_versions" ) as post, mock.patch.object( - transports.ArtifactRegistryRestInterceptor, "pre_import_apt_artifacts" + transports.ArtifactRegistryRestInterceptor, "pre_list_versions" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = apt_artifact.ImportAptArtifactsRequest.pb( - apt_artifact.ImportAptArtifactsRequest() - ) + pb_message = version.ListVersionsRequest.pb(version.ListVersionsRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -23731,18 +32929,20 @@ def test_import_apt_artifacts_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 - return_value = json_format.MessageToJson(operations_pb2.Operation()) + return_value = version.ListVersionsResponse.to_json( + version.ListVersionsResponse() + ) req.return_value.content = return_value - request = apt_artifact.ImportAptArtifactsRequest() + request = version.ListVersionsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = 
version.ListVersionsResponse() - client.import_apt_artifacts( + client.list_versions( request, metadata=[ ("key", "val"), @@ -23754,14 +32954,14 @@ def test_import_apt_artifacts_rest_interceptors(null_interceptor): post.assert_called_once() -def test_import_yum_artifacts_rest_bad_request( - request_type=yum_artifact.ImportYumArtifactsRequest, -): +def test_get_version_rest_bad_request(request_type=version.GetVersionRequest): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2/repositories/sample3"} + request_init = { + "name": "projects/sample1/locations/sample2/repositories/sample3/packages/sample4/versions/sample5" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -23775,44 +32975,54 @@ def test_import_yum_artifacts_rest_bad_request( response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value - client.import_yum_artifacts(request) + client.get_version(request) @pytest.mark.parametrize( "request_type", [ - yum_artifact.ImportYumArtifactsRequest, + version.GetVersionRequest, dict, ], ) -def test_import_yum_artifacts_rest_call_success(request_type): +def test_get_version_rest_call_success(request_type): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2/repositories/sample3"} + request_init = { + "name": "projects/sample1/locations/sample2/repositories/sample3/packages/sample4/versions/sample5" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = version.Version( + name="name_value", + description="description_value", + ) # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = version.Version.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.import_yum_artifacts(request) + response = client.get_version(request) # Establish that the response is the type that we expect. - json_return_value = json_format.MessageToJson(return_value) + assert isinstance(response, version.Version) + assert response.name == "name_value" + assert response.description == "description_value" @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_import_yum_artifacts_rest_interceptors(null_interceptor): +def test_get_version_rest_interceptors(null_interceptor): transport = transports.ArtifactRegistryRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -23825,18 +33035,14 @@ def test_import_yum_artifacts_rest_interceptors(null_interceptor): type(client.transport._session), "request" ) as req, mock.patch.object( path_template, "transcode" - ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.ArtifactRegistryRestInterceptor, "post_import_yum_artifacts" + ) as transcode, mock.patch.object( + transports.ArtifactRegistryRestInterceptor, "post_get_version" ) as post, mock.patch.object( - transports.ArtifactRegistryRestInterceptor, "pre_import_yum_artifacts" + transports.ArtifactRegistryRestInterceptor, "pre_get_version" ) as pre: pre.assert_not_called() 
post.assert_not_called() - pb_message = yum_artifact.ImportYumArtifactsRequest.pb( - yum_artifact.ImportYumArtifactsRequest() - ) + pb_message = version.GetVersionRequest.pb(version.GetVersionRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -23846,18 +33052,18 @@ def test_import_yum_artifacts_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 - return_value = json_format.MessageToJson(operations_pb2.Operation()) + return_value = version.Version.to_json(version.Version()) req.return_value.content = return_value - request = yum_artifact.ImportYumArtifactsRequest() + request = version.GetVersionRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = version.Version() - client.import_yum_artifacts( + client.get_version( request, metadata=[ ("key", "val"), @@ -23869,14 +33075,14 @@ def test_import_yum_artifacts_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_repositories_rest_bad_request( - request_type=repository.ListRepositoriesRequest, -): +def test_delete_version_rest_bad_request(request_type=version.DeleteVersionRequest): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "name": "projects/sample1/locations/sample2/repositories/sample3/packages/sample4/versions/sample5" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -23890,50 +33096,46 @@ def test_list_repositories_rest_bad_request( response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value - client.list_repositories(request) + client.delete_version(request) @pytest.mark.parametrize( "request_type", [ - repository.ListRepositoriesRequest, + version.DeleteVersionRequest, dict, ], ) -def test_list_repositories_rest_call_success(request_type): +def test_delete_version_rest_call_success(request_type): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "name": "projects/sample1/locations/sample2/repositories/sample3/packages/sample4/versions/sample5" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = repository.ListRepositoriesResponse( - next_page_token="next_page_token_value", - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = repository.ListRepositoriesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_repositories(request) + response = client.delete_version(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListRepositoriesPager) - assert response.next_page_token == "next_page_token_value" + json_return_value = json_format.MessageToJson(return_value) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_repositories_rest_interceptors(null_interceptor): +def test_delete_version_rest_interceptors(null_interceptor): transport = transports.ArtifactRegistryRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -23947,15 +33149,15 @@ def test_list_repositories_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ArtifactRegistryRestInterceptor, "post_list_repositories" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.ArtifactRegistryRestInterceptor, "post_delete_version" ) as post, mock.patch.object( - transports.ArtifactRegistryRestInterceptor, "pre_list_repositories" + transports.ArtifactRegistryRestInterceptor, "pre_delete_version" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = repository.ListRepositoriesRequest.pb( - repository.ListRepositoriesRequest() - ) + pb_message = version.DeleteVersionRequest.pb(version.DeleteVersionRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -23965,20 +33167,18 @@ def test_list_repositories_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 - return_value = repository.ListRepositoriesResponse.to_json( - repository.ListRepositoriesResponse() - ) + return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.content = return_value - request = repository.ListRepositoriesRequest() + request = version.DeleteVersionRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = repository.ListRepositoriesResponse() + post.return_value = 
operations_pb2.Operation() - client.list_repositories( + client.delete_version( request, metadata=[ ("key", "val"), @@ -23990,12 +33190,16 @@ def test_list_repositories_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_repository_rest_bad_request(request_type=repository.GetRepositoryRequest): +def test_batch_delete_versions_rest_bad_request( + request_type=version.BatchDeleteVersionsRequest, +): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/repositories/sample3"} + request_init = { + "parent": "projects/sample1/locations/sample2/repositories/sample3/packages/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -24009,64 +33213,46 @@ def test_get_repository_rest_bad_request(request_type=repository.GetRepositoryRe response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value - client.get_repository(request) + client.batch_delete_versions(request) @pytest.mark.parametrize( "request_type", [ - repository.GetRepositoryRequest, + version.BatchDeleteVersionsRequest, dict, ], ) -def test_get_repository_rest_call_success(request_type): +def test_batch_delete_versions_rest_call_success(request_type): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/repositories/sample3"} + request_init = { + "parent": "projects/sample1/locations/sample2/repositories/sample3/packages/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = repository.Repository( - name="name_value", - format_=repository.Repository.Format.DOCKER, - description="description_value", - kms_key_name="kms_key_name_value", - mode=repository.Repository.Mode.STANDARD_REPOSITORY, - size_bytes=1089, - satisfies_pzs=True, - cleanup_policy_dry_run=True, - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = repository.Repository.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_repository(request) + response = client.batch_delete_versions(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, repository.Repository) - assert response.name == "name_value" - assert response.format_ == repository.Repository.Format.DOCKER - assert response.description == "description_value" - assert response.kms_key_name == "kms_key_name_value" - assert response.mode == repository.Repository.Mode.STANDARD_REPOSITORY - assert response.size_bytes == 1089 - assert response.satisfies_pzs is True - assert response.cleanup_policy_dry_run is True + json_return_value = json_format.MessageToJson(return_value) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_repository_rest_interceptors(null_interceptor): +def test_batch_delete_versions_rest_interceptors(null_interceptor): transport = transports.ArtifactRegistryRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -24080,14 +33266,16 @@ def test_get_repository_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ArtifactRegistryRestInterceptor, "post_get_repository" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.ArtifactRegistryRestInterceptor, "post_batch_delete_versions" ) as post, mock.patch.object( - transports.ArtifactRegistryRestInterceptor, "pre_get_repository" + transports.ArtifactRegistryRestInterceptor, "pre_batch_delete_versions" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = repository.GetRepositoryRequest.pb( - repository.GetRepositoryRequest() + pb_message = version.BatchDeleteVersionsRequest.pb( + version.BatchDeleteVersionsRequest() ) transcode.return_value = { "method": "post", @@ -24098,18 +33286,18 @@ def test_get_repository_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 - return_value = repository.Repository.to_json(repository.Repository()) + return_value = json_format.MessageToJson(operations_pb2.Operation()) 
req.return_value.content = return_value - request = repository.GetRepositoryRequest() + request = version.BatchDeleteVersionsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = repository.Repository() + post.return_value = operations_pb2.Operation() - client.get_repository( + client.batch_delete_versions( request, metadata=[ ("key", "val"), @@ -24121,14 +33309,16 @@ def test_get_repository_rest_interceptors(null_interceptor): post.assert_called_once() -def test_create_repository_rest_bad_request( - request_type=gda_repository.CreateRepositoryRequest, -): +def test_update_version_rest_bad_request(request_type=gda_version.UpdateVersionRequest): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "version": { + "name": "projects/sample1/locations/sample2/repositories/sample3/packages/sample4/versions/sample5" + } + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -24142,75 +33332,42 @@ def test_create_repository_rest_bad_request( response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value - client.create_repository(request) + client.update_version(request) @pytest.mark.parametrize( "request_type", [ - gda_repository.CreateRepositoryRequest, + gda_version.UpdateVersionRequest, dict, ], ) -def test_create_repository_rest_call_success(request_type): +def test_update_version_rest_call_success(request_type): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request_init["repository"] = { - "maven_config": {"allow_snapshot_overwrites": True, "version_policy": 1}, - "docker_config": {"immutable_tags": True}, - "virtual_repository_config": { - "upstream_policies": [ - {"id": "id_value", "repository": "repository_value", "priority": 898} - ] - }, - "remote_repository_config": { - "docker_repository": {"public_repository": 1}, - "maven_repository": {"public_repository": 1}, - "npm_repository": {"public_repository": 1}, - "python_repository": {"public_repository": 1}, - "apt_repository": { - "public_repository": { - "repository_base": 1, - "repository_path": "repository_path_value", - } - }, - "yum_repository": { - "public_repository": { - "repository_base": 1, - "repository_path": "repository_path_value", - } - }, - "description": "description_value", - "upstream_credentials": { - "username_password_credentials": { - "username": "username_value", - "password_secret_version": "password_secret_version_value", - } - }, - }, - "name": "name_value", - "format_": 1, + request_init = { + "version": { + "name": "projects/sample1/locations/sample2/repositories/sample3/packages/sample4/versions/sample5" + } + } + request_init["version"] = { + "name": 
"projects/sample1/locations/sample2/repositories/sample3/packages/sample4/versions/sample5", "description": "description_value", - "labels": {}, "create_time": {"seconds": 751, "nanos": 543}, "update_time": {}, - "kms_key_name": "kms_key_name_value", - "mode": 1, - "cleanup_policies": {}, - "size_bytes": 1089, - "satisfies_pzs": True, - "cleanup_policy_dry_run": True, + "related_tags": [{"name": "name_value", "version": "version_value"}], + "metadata": {"fields": {}}, + "annotations": {}, } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = gda_repository.CreateRepositoryRequest.meta.fields["repository"] + test_field = gda_version.UpdateVersionRequest.meta.fields["version"] def get_message_fields(field): # Given a field which is a message (composite type), return a list with @@ -24238,7 +33395,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["repository"].items(): # pragma: NO COVER + for field, value in request_init["version"].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -24268,31 +33425,39 @@ def get_message_fields(field): subfield = subfield_to_delete.get("subfield") if subfield: if field_repeated: - for i in range(0, len(request_init["repository"][field])): - del request_init["repository"][field][i][subfield] + for i in range(0, len(request_init["version"][field])): + del request_init["version"][field][i][subfield] else: - del request_init["repository"][field][subfield] + del 
request_init["version"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = gda_version.Version( + name="name_value", + description="description_value", + ) # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = gda_version.Version.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_repository(request) + response = client.update_version(request) # Establish that the response is the type that we expect. - json_return_value = json_format.MessageToJson(return_value) + assert isinstance(response, gda_version.Version) + assert response.name == "name_value" + assert response.description == "description_value" @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_repository_rest_interceptors(null_interceptor): +def test_update_version_rest_interceptors(null_interceptor): transport = transports.ArtifactRegistryRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -24306,16 +33471,14 @@ def test_create_repository_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.ArtifactRegistryRestInterceptor, "post_create_repository" + transports.ArtifactRegistryRestInterceptor, "post_update_version" ) as post, mock.patch.object( - transports.ArtifactRegistryRestInterceptor, "pre_create_repository" + 
transports.ArtifactRegistryRestInterceptor, "pre_update_version" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = gda_repository.CreateRepositoryRequest.pb( - gda_repository.CreateRepositoryRequest() + pb_message = gda_version.UpdateVersionRequest.pb( + gda_version.UpdateVersionRequest() ) transcode.return_value = { "method": "post", @@ -24326,18 +33489,18 @@ def test_create_repository_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 - return_value = json_format.MessageToJson(operations_pb2.Operation()) + return_value = gda_version.Version.to_json(gda_version.Version()) req.return_value.content = return_value - request = gda_repository.CreateRepositoryRequest() + request = gda_version.UpdateVersionRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = gda_version.Version() - client.create_repository( + client.update_version( request, metadata=[ ("key", "val"), @@ -24349,18 +33512,12 @@ def test_create_repository_rest_interceptors(null_interceptor): post.assert_called_once() -def test_update_repository_rest_bad_request( - request_type=gda_repository.UpdateRepositoryRequest, -): +def test_list_files_rest_bad_request(request_type=file.ListFilesRequest): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = { - "repository": { - "name": "projects/sample1/locations/sample2/repositories/sample3" - } - } + request_init = {"parent": "projects/sample1/locations/sample2/repositories/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -24374,154 +33531,151 @@ def test_update_repository_rest_bad_request( response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value - client.update_repository(request) + client.list_files(request) @pytest.mark.parametrize( "request_type", [ - gda_repository.UpdateRepositoryRequest, + file.ListFilesRequest, dict, ], ) -def test_update_repository_rest_call_success(request_type): +def test_list_files_rest_call_success(request_type): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = { - "repository": { - "name": "projects/sample1/locations/sample2/repositories/sample3" + request_init = {"parent": "projects/sample1/locations/sample2/repositories/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = file.ListFilesResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = file.ListFilesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_files(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListFilesPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_files_rest_interceptors(null_interceptor): + transport = transports.ArtifactRegistryRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ArtifactRegistryRestInterceptor(), + ) + client = ArtifactRegistryClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ArtifactRegistryRestInterceptor, "post_list_files" + ) as post, mock.patch.object( + transports.ArtifactRegistryRestInterceptor, "pre_list_files" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = file.ListFilesRequest.pb(file.ListFilesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, } - } - request_init["repository"] = { - "maven_config": {"allow_snapshot_overwrites": True, "version_policy": 1}, - "docker_config": {"immutable_tags": True}, - "virtual_repository_config": { - "upstream_policies": [ - {"id": "id_value", "repository": "repository_value", "priority": 898} - ] - }, - "remote_repository_config": { - "docker_repository": {"public_repository": 1}, - "maven_repository": {"public_repository": 1}, - "npm_repository": {"public_repository": 1}, - "python_repository": {"public_repository": 1}, - "apt_repository": { - "public_repository": { - "repository_base": 1, - "repository_path": "repository_path_value", - } - }, - "yum_repository": { - "public_repository": { - "repository_base": 1, - "repository_path": "repository_path_value", - } - }, - "description": "description_value", - "upstream_credentials": { - "username_password_credentials": { - "username": "username_value", - 
"password_secret_version": "password_secret_version_value", - } - }, - }, - "name": "projects/sample1/locations/sample2/repositories/sample3", - "format_": 1, - "description": "description_value", - "labels": {}, - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "kms_key_name": "kms_key_name_value", - "mode": 1, - "cleanup_policies": {}, - "size_bytes": 1089, - "satisfies_pzs": True, - "cleanup_policy_dry_run": True, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - # Determine if the message type is proto-plus or protobuf - test_field = gda_repository.UpdateRepositoryRequest.meta.fields["repository"] + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = file.ListFilesResponse.to_json(file.ListFilesResponse()) + req.return_value.content = return_value - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] + request = file.ListFilesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = file.ListFilesResponse() - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + client.list_files( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields + pre.assert_called_once() + post.assert_called_once() - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - subfields_not_in_runtime = [] +def test_get_file_rest_bad_request(request_type=file.GetFileRequest): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/repositories/sample3/files/sample4" + } + request = request_type(**request_init) - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["repository"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.get_file(request) - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["repository"][field])): - del request_init["repository"][field][i][subfield] - else: - del request_init["repository"][field][subfield] +@pytest.mark.parametrize( + "request_type", + [ + file.GetFileRequest, + dict, + ], +) +def test_get_file_rest_call_success(request_type): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/repositories/sample3/files/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = gda_repository.Repository( + return_value = file.File( name="name_value", - format_=gda_repository.Repository.Format.DOCKER, - description="description_value", - kms_key_name="kms_key_name_value", - mode=gda_repository.Repository.Mode.STANDARD_REPOSITORY, size_bytes=1089, - satisfies_pzs=True, - cleanup_policy_dry_run=True, + owner="owner_value", ) # Wrap the value into a proper Response obj @@ -24529,26 +33683,21 @@ def get_message_fields(field): response_value.status_code = 200 # Convert return value to protobuf type - return_value = gda_repository.Repository.pb(return_value) + return_value = file.File.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_repository(request) + response = client.get_file(request) # Establish that the response is the type that we expect. - assert isinstance(response, gda_repository.Repository) + assert isinstance(response, file.File) assert response.name == "name_value" - assert response.format_ == gda_repository.Repository.Format.DOCKER - assert response.description == "description_value" - assert response.kms_key_name == "kms_key_name_value" - assert response.mode == gda_repository.Repository.Mode.STANDARD_REPOSITORY assert response.size_bytes == 1089 - assert response.satisfies_pzs is True - assert response.cleanup_policy_dry_run is True + assert response.owner == "owner_value" @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_repository_rest_interceptors(null_interceptor): +def test_get_file_rest_interceptors(null_interceptor): transport = transports.ArtifactRegistryRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -24562,15 +33711,13 @@ def test_update_repository_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - 
transports.ArtifactRegistryRestInterceptor, "post_update_repository" + transports.ArtifactRegistryRestInterceptor, "post_get_file" ) as post, mock.patch.object( - transports.ArtifactRegistryRestInterceptor, "pre_update_repository" + transports.ArtifactRegistryRestInterceptor, "pre_get_file" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = gda_repository.UpdateRepositoryRequest.pb( - gda_repository.UpdateRepositoryRequest() - ) + pb_message = file.GetFileRequest.pb(file.GetFileRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -24580,18 +33727,18 @@ def test_update_repository_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 - return_value = gda_repository.Repository.to_json(gda_repository.Repository()) + return_value = file.File.to_json(file.File()) req.return_value.content = return_value - request = gda_repository.UpdateRepositoryRequest() + request = file.GetFileRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = gda_repository.Repository() + post.return_value = file.File() - client.update_repository( + client.get_file( request, metadata=[ ("key", "val"), @@ -24603,14 +33750,14 @@ def test_update_repository_rest_interceptors(null_interceptor): post.assert_called_once() -def test_delete_repository_rest_bad_request( - request_type=repository.DeleteRepositoryRequest, -): +def test_delete_file_rest_bad_request(request_type=file.DeleteFileRequest): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/repositories/sample3"} + request_init = { + "name": "projects/sample1/locations/sample2/repositories/sample3/files/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -24624,23 +33771,25 @@ def test_delete_repository_rest_bad_request( response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value - client.delete_repository(request) + client.delete_file(request) @pytest.mark.parametrize( "request_type", [ - repository.DeleteRepositoryRequest, + file.DeleteFileRequest, dict, ], ) -def test_delete_repository_rest_call_success(request_type): +def test_delete_file_rest_call_success(request_type): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/repositories/sample3"} + request_init = { + "name": "projects/sample1/locations/sample2/repositories/sample3/files/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -24654,14 +33803,14 @@ def test_delete_repository_rest_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_repository(request) + response = client.delete_file(request) # Establish that the response is the type that we expect. 
json_return_value = json_format.MessageToJson(return_value) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_repository_rest_interceptors(null_interceptor): +def test_delete_file_rest_interceptors(null_interceptor): transport = transports.ArtifactRegistryRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -24677,15 +33826,13 @@ def test_delete_repository_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.ArtifactRegistryRestInterceptor, "post_delete_repository" + transports.ArtifactRegistryRestInterceptor, "post_delete_file" ) as post, mock.patch.object( - transports.ArtifactRegistryRestInterceptor, "pre_delete_repository" + transports.ArtifactRegistryRestInterceptor, "pre_delete_file" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = repository.DeleteRepositoryRequest.pb( - repository.DeleteRepositoryRequest() - ) + pb_message = file.DeleteFileRequest.pb(file.DeleteFileRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -24698,7 +33845,7 @@ def test_delete_repository_rest_interceptors(null_interceptor): return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.content = return_value - request = repository.DeleteRepositoryRequest() + request = file.DeleteFileRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -24706,7 +33853,7 @@ def test_delete_repository_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.delete_repository( + client.delete_file( request, metadata=[ ("key", "val"), @@ -24718,12 +33865,16 @@ def test_delete_repository_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_packages_rest_bad_request(request_type=package.ListPackagesRequest): +def 
test_update_file_rest_bad_request(request_type=gda_file.UpdateFileRequest): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2/repositories/sample3"} + request_init = { + "file": { + "name": "projects/sample1/locations/sample2/repositories/sample3/files/sample4" + } + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -24737,30 +33888,113 @@ def test_list_packages_rest_bad_request(request_type=package.ListPackagesRequest response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value - client.list_packages(request) + client.update_file(request) @pytest.mark.parametrize( "request_type", [ - package.ListPackagesRequest, + gda_file.UpdateFileRequest, dict, ], ) -def test_list_packages_rest_call_success(request_type): +def test_update_file_rest_call_success(request_type): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2/repositories/sample3"} + request_init = { + "file": { + "name": "projects/sample1/locations/sample2/repositories/sample3/files/sample4" + } + } + request_init["file"] = { + "name": "projects/sample1/locations/sample2/repositories/sample3/files/sample4", + "size_bytes": 1089, + "hashes": [{"type_": 1, "value": b"value_blob"}], + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "owner": "owner_value", + "fetch_time": {}, + "annotations": {}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = gda_file.UpdateFileRequest.meta.fields["file"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["file"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the 
dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["file"][field])): + del request_init["file"][field][i][subfield] + else: + del request_init["file"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = package.ListPackagesResponse( - next_page_token="next_page_token_value", + return_value = gda_file.File( + name="name_value", + size_bytes=1089, + owner="owner_value", ) # Wrap the value into a proper Response obj @@ -24768,19 +34002,21 @@ def test_list_packages_rest_call_success(request_type): response_value.status_code = 200 # Convert return value to protobuf type - return_value = package.ListPackagesResponse.pb(return_value) + return_value = gda_file.File.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_packages(request) + response = client.update_file(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListPackagesPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, gda_file.File) + assert response.name == "name_value" + assert response.size_bytes == 1089 + assert response.owner == "owner_value" @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_packages_rest_interceptors(null_interceptor): +def test_update_file_rest_interceptors(null_interceptor): transport = transports.ArtifactRegistryRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -24794,13 +34030,13 @@ def test_list_packages_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ArtifactRegistryRestInterceptor, "post_list_packages" + transports.ArtifactRegistryRestInterceptor, "post_update_file" ) as post, mock.patch.object( - transports.ArtifactRegistryRestInterceptor, "pre_list_packages" + transports.ArtifactRegistryRestInterceptor, "pre_update_file" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = package.ListPackagesRequest.pb(package.ListPackagesRequest()) + pb_message = gda_file.UpdateFileRequest.pb(gda_file.UpdateFileRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -24810,20 +34046,18 @@ def test_list_packages_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 - return_value = package.ListPackagesResponse.to_json( - package.ListPackagesResponse() - ) + return_value = gda_file.File.to_json(gda_file.File()) req.return_value.content = return_value - request = package.ListPackagesRequest() + request = gda_file.UpdateFileRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = package.ListPackagesResponse() + post.return_value = gda_file.File() - client.list_packages( + client.update_file( request, metadata=[ 
("key", "val"), @@ -24835,13 +34069,13 @@ def test_list_packages_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_package_rest_bad_request(request_type=package.GetPackageRequest): +def test_list_tags_rest_bad_request(request_type=tag.ListTagsRequest): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/repositories/sample3/packages/sample4" + "parent": "projects/sample1/locations/sample2/repositories/sample3/packages/sample4" } request = request_type(**request_init) @@ -24856,33 +34090,32 @@ def test_get_package_rest_bad_request(request_type=package.GetPackageRequest): response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value - client.get_package(request) + client.list_tags(request) @pytest.mark.parametrize( "request_type", [ - package.GetPackageRequest, + tag.ListTagsRequest, dict, ], ) -def test_get_package_rest_call_success(request_type): +def test_list_tags_rest_call_success(request_type): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/repositories/sample3/packages/sample4" + "parent": "projects/sample1/locations/sample2/repositories/sample3/packages/sample4" } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = package.Package( - name="name_value", - display_name="display_name_value", + return_value = tag.ListTagsResponse( + next_page_token="next_page_token_value", ) # Wrap the value into a proper Response obj @@ -24890,20 +34123,19 @@ def test_get_package_rest_call_success(request_type): response_value.status_code = 200 # Convert return value to protobuf type - return_value = package.Package.pb(return_value) + return_value = tag.ListTagsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_package(request) + response = client.list_tags(request) # Establish that the response is the type that we expect. - assert isinstance(response, package.Package) - assert response.name == "name_value" - assert response.display_name == "display_name_value" + assert isinstance(response, pagers.ListTagsPager) + assert response.next_page_token == "next_page_token_value" @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_package_rest_interceptors(null_interceptor): +def test_list_tags_rest_interceptors(null_interceptor): transport = transports.ArtifactRegistryRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -24917,13 +34149,13 @@ def test_get_package_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ArtifactRegistryRestInterceptor, "post_get_package" + transports.ArtifactRegistryRestInterceptor, "post_list_tags" ) as post, mock.patch.object( - transports.ArtifactRegistryRestInterceptor, "pre_get_package" + transports.ArtifactRegistryRestInterceptor, "pre_list_tags" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = package.GetPackageRequest.pb(package.GetPackageRequest()) + pb_message = tag.ListTagsRequest.pb(tag.ListTagsRequest()) transcode.return_value = { 
"method": "post", "uri": "my_uri", @@ -24933,18 +34165,18 @@ def test_get_package_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 - return_value = package.Package.to_json(package.Package()) + return_value = tag.ListTagsResponse.to_json(tag.ListTagsResponse()) req.return_value.content = return_value - request = package.GetPackageRequest() + request = tag.ListTagsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = package.Package() + post.return_value = tag.ListTagsResponse() - client.get_package( + client.list_tags( request, metadata=[ ("key", "val"), @@ -24956,13 +34188,13 @@ def test_get_package_rest_interceptors(null_interceptor): post.assert_called_once() -def test_delete_package_rest_bad_request(request_type=package.DeletePackageRequest): +def test_get_tag_rest_bad_request(request_type=tag.GetTagRequest): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/repositories/sample3/packages/sample4" + "name": "projects/sample1/locations/sample2/repositories/sample3/packages/sample4/tags/sample5" } request = request_type(**request_init) @@ -24977,46 +34209,54 @@ def test_delete_package_rest_bad_request(request_type=package.DeletePackageReque response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value - client.delete_package(request) + client.get_tag(request) @pytest.mark.parametrize( "request_type", [ - package.DeletePackageRequest, + tag.GetTagRequest, dict, ], ) -def test_delete_package_rest_call_success(request_type): +def test_get_tag_rest_call_success(request_type): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding request_init = { - 
"name": "projects/sample1/locations/sample2/repositories/sample3/packages/sample4" + "name": "projects/sample1/locations/sample2/repositories/sample3/packages/sample4/tags/sample5" } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = tag.Tag( + name="name_value", + version="version_value", + ) # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = tag.Tag.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_package(request) + response = client.get_tag(request) # Establish that the response is the type that we expect. 
- json_return_value = json_format.MessageToJson(return_value) + assert isinstance(response, tag.Tag) + assert response.name == "name_value" + assert response.version == "version_value" @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_package_rest_interceptors(null_interceptor): +def test_get_tag_rest_interceptors(null_interceptor): transport = transports.ArtifactRegistryRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -25030,15 +34270,13 @@ def test_delete_package_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.ArtifactRegistryRestInterceptor, "post_delete_package" + transports.ArtifactRegistryRestInterceptor, "post_get_tag" ) as post, mock.patch.object( - transports.ArtifactRegistryRestInterceptor, "pre_delete_package" + transports.ArtifactRegistryRestInterceptor, "pre_get_tag" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = package.DeletePackageRequest.pb(package.DeletePackageRequest()) + pb_message = tag.GetTagRequest.pb(tag.GetTagRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -25048,18 +34286,18 @@ def test_delete_package_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 - return_value = json_format.MessageToJson(operations_pb2.Operation()) + return_value = tag.Tag.to_json(tag.Tag()) req.return_value.content = return_value - request = package.DeletePackageRequest() + request = tag.GetTagRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = tag.Tag() - client.delete_package( + client.get_tag( request, metadata=[ ("key", "val"), @@ -25071,7 +34309,7 @@ def 
test_delete_package_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_versions_rest_bad_request(request_type=version.ListVersionsRequest): +def test_create_tag_rest_bad_request(request_type=gda_tag.CreateTagRequest): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -25092,17 +34330,17 @@ def test_list_versions_rest_bad_request(request_type=version.ListVersionsRequest response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value - client.list_versions(request) + client.create_tag(request) @pytest.mark.parametrize( "request_type", [ - version.ListVersionsRequest, + gda_tag.CreateTagRequest, dict, ], ) -def test_list_versions_rest_call_success(request_type): +def test_create_tag_rest_call_success(request_type): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -25111,13 +34349,82 @@ def test_list_versions_rest_call_success(request_type): request_init = { "parent": "projects/sample1/locations/sample2/repositories/sample3/packages/sample4" } + request_init["tag"] = {"name": "name_value", "version": "version_value"} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = gda_tag.CreateTagRequest.meta.fields["tag"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["tag"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["tag"][field])): + del request_init["tag"][field][i][subfield] + else: + del 
request_init["tag"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = version.ListVersionsResponse( - next_page_token="next_page_token_value", + return_value = gda_tag.Tag( + name="name_value", + version="version_value", ) # Wrap the value into a proper Response obj @@ -25125,19 +34432,20 @@ def test_list_versions_rest_call_success(request_type): response_value.status_code = 200 # Convert return value to protobuf type - return_value = version.ListVersionsResponse.pb(return_value) + return_value = gda_tag.Tag.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_versions(request) + response = client.create_tag(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListVersionsPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, gda_tag.Tag) + assert response.name == "name_value" + assert response.version == "version_value" @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_versions_rest_interceptors(null_interceptor): +def test_create_tag_rest_interceptors(null_interceptor): transport = transports.ArtifactRegistryRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -25151,13 +34459,13 @@ def test_list_versions_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ArtifactRegistryRestInterceptor, "post_list_versions" + transports.ArtifactRegistryRestInterceptor, "post_create_tag" ) as post, mock.patch.object( - transports.ArtifactRegistryRestInterceptor, "pre_list_versions" + transports.ArtifactRegistryRestInterceptor, "pre_create_tag" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = version.ListVersionsRequest.pb(version.ListVersionsRequest()) + pb_message = gda_tag.CreateTagRequest.pb(gda_tag.CreateTagRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -25167,20 +34475,18 @@ def test_list_versions_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 - return_value = version.ListVersionsResponse.to_json( - version.ListVersionsResponse() - ) + return_value = gda_tag.Tag.to_json(gda_tag.Tag()) req.return_value.content = return_value - request = version.ListVersionsRequest() + request = gda_tag.CreateTagRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = version.ListVersionsResponse() + post.return_value = gda_tag.Tag() - client.list_versions( + client.create_tag( request, metadata=[ ("key", "val"), @@ -25192,13 +34498,15 @@ def 
test_list_versions_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_version_rest_bad_request(request_type=version.GetVersionRequest): +def test_update_tag_rest_bad_request(request_type=gda_tag.UpdateTagRequest): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/repositories/sample3/packages/sample4/versions/sample5" + "tag": { + "name": "projects/sample1/locations/sample2/repositories/sample3/packages/sample4/tags/sample5" + } } request = request_type(**request_init) @@ -25213,33 +34521,106 @@ def test_get_version_rest_bad_request(request_type=version.GetVersionRequest): response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value - client.get_version(request) + client.update_tag(request) @pytest.mark.parametrize( "request_type", [ - version.GetVersionRequest, + gda_tag.UpdateTagRequest, dict, ], ) -def test_get_version_rest_call_success(request_type): +def test_update_tag_rest_call_success(request_type): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/repositories/sample3/packages/sample4/versions/sample5" + "tag": { + "name": "projects/sample1/locations/sample2/repositories/sample3/packages/sample4/tags/sample5" + } + } + request_init["tag"] = { + "name": "projects/sample1/locations/sample2/repositories/sample3/packages/sample4/tags/sample5", + "version": "version_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = gda_tag.UpdateTagRequest.meta.fields["tag"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["tag"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + 
# Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["tag"][field])): + del request_init["tag"][field][i][subfield] + else: + del request_init["tag"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = version.Version( + return_value = gda_tag.Tag( name="name_value", - description="description_value", + version="version_value", ) # Wrap the value into a proper Response obj @@ -25247,20 +34628,20 @@ def test_get_version_rest_call_success(request_type): response_value.status_code = 200 # Convert return value to protobuf type - return_value = version.Version.pb(return_value) + return_value = gda_tag.Tag.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_version(request) + response = client.update_tag(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, version.Version) + assert isinstance(response, gda_tag.Tag) assert response.name == "name_value" - assert response.description == "description_value" + assert response.version == "version_value" @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_version_rest_interceptors(null_interceptor): +def test_update_tag_rest_interceptors(null_interceptor): transport = transports.ArtifactRegistryRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -25274,13 +34655,13 @@ def test_get_version_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ArtifactRegistryRestInterceptor, "post_get_version" + transports.ArtifactRegistryRestInterceptor, "post_update_tag" ) as post, mock.patch.object( - transports.ArtifactRegistryRestInterceptor, "pre_get_version" + transports.ArtifactRegistryRestInterceptor, "pre_update_tag" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = version.GetVersionRequest.pb(version.GetVersionRequest()) + pb_message = gda_tag.UpdateTagRequest.pb(gda_tag.UpdateTagRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -25290,18 +34671,18 @@ def test_get_version_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 - return_value = version.Version.to_json(version.Version()) + return_value = gda_tag.Tag.to_json(gda_tag.Tag()) req.return_value.content = return_value - request = version.GetVersionRequest() + request = gda_tag.UpdateTagRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = version.Version() + post.return_value = gda_tag.Tag() - client.get_version( + client.update_tag( request, metadata=[ ("key", "val"), @@ -25313,13 +34694,13 @@ def test_get_version_rest_interceptors(null_interceptor): post.assert_called_once() -def 
test_delete_version_rest_bad_request(request_type=version.DeleteVersionRequest): +def test_delete_tag_rest_bad_request(request_type=tag.DeleteTagRequest): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/repositories/sample3/packages/sample4/versions/sample5" + "name": "projects/sample1/locations/sample2/repositories/sample3/packages/sample4/tags/sample5" } request = request_type(**request_init) @@ -25334,46 +34715,46 @@ def test_delete_version_rest_bad_request(request_type=version.DeleteVersionReque response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value - client.delete_version(request) + client.delete_tag(request) @pytest.mark.parametrize( "request_type", [ - version.DeleteVersionRequest, + tag.DeleteTagRequest, dict, ], ) -def test_delete_version_rest_call_success(request_type): +def test_delete_tag_rest_call_success(request_type): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/repositories/sample3/packages/sample4/versions/sample5" + "name": "projects/sample1/locations/sample2/repositories/sample3/packages/sample4/tags/sample5" } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") + return_value = None # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_version(request) + response = client.delete_tag(request) # Establish that the response is the type that we expect. - json_return_value = json_format.MessageToJson(return_value) + assert response is None @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_version_rest_interceptors(null_interceptor): +def test_delete_tag_rest_interceptors(null_interceptor): transport = transports.ArtifactRegistryRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -25387,15 +34768,10 @@ def test_delete_version_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.ArtifactRegistryRestInterceptor, "post_delete_version" - ) as post, mock.patch.object( - transports.ArtifactRegistryRestInterceptor, "pre_delete_version" + transports.ArtifactRegistryRestInterceptor, "pre_delete_tag" ) as pre: pre.assert_not_called() - post.assert_not_called() - pb_message = version.DeleteVersionRequest.pb(version.DeleteVersionRequest()) + pb_message = tag.DeleteTagRequest.pb(tag.DeleteTagRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -25405,18 +34781,15 @@ def test_delete_version_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 - return_value = json_format.MessageToJson(operations_pb2.Operation()) - req.return_value.content = return_value - request = version.DeleteVersionRequest() + request = tag.DeleteTagRequest() 
metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - client.delete_version( + client.delete_tag( request, metadata=[ ("key", "val"), @@ -25425,19 +34798,14 @@ def test_delete_version_rest_interceptors(null_interceptor): ) pre.assert_called_once() - post.assert_called_once() -def test_batch_delete_versions_rest_bad_request( - request_type=version.BatchDeleteVersionsRequest, -): +def test_create_rule_rest_bad_request(request_type=gda_rule.CreateRuleRequest): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = { - "parent": "projects/sample1/locations/sample2/repositories/sample3/packages/sample4" - } + request_init = {"parent": "projects/sample1/locations/sample2/repositories/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -25451,46 +34819,135 @@ def test_batch_delete_versions_rest_bad_request( response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value - client.batch_delete_versions(request) + client.create_rule(request) @pytest.mark.parametrize( "request_type", [ - version.BatchDeleteVersionsRequest, + gda_rule.CreateRuleRequest, dict, ], ) -def test_batch_delete_versions_rest_call_success(request_type): +def test_create_rule_rest_call_success(request_type): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # send a request that will satisfy transcoding - request_init = { - "parent": "projects/sample1/locations/sample2/repositories/sample3/packages/sample4" - } + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/repositories/sample3"} + request_init["rule"] = { + "name": "name_value", + "action": 1, + "operation": 1, + "condition": { + "expression": "expression_value", + "title": "title_value", + "description": "description_value", + "location": "location_value", + }, + "package_id": "package_id_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = gda_rule.CreateRuleRequest.meta.fields["rule"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["rule"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["rule"][field])): + del request_init["rule"][field][i][subfield] + else: + del 
request_init["rule"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = gda_rule.Rule( + name="name_value", + action=gda_rule.Rule.Action.ALLOW, + operation=gda_rule.Rule.Operation.DOWNLOAD, + package_id="package_id_value", + ) # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = gda_rule.Rule.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.batch_delete_versions(request) + response = client.create_rule(request) # Establish that the response is the type that we expect. 
- json_return_value = json_format.MessageToJson(return_value) + assert isinstance(response, gda_rule.Rule) + assert response.name == "name_value" + assert response.action == gda_rule.Rule.Action.ALLOW + assert response.operation == gda_rule.Rule.Operation.DOWNLOAD + assert response.package_id == "package_id_value" @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_batch_delete_versions_rest_interceptors(null_interceptor): +def test_create_rule_rest_interceptors(null_interceptor): transport = transports.ArtifactRegistryRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -25504,17 +34961,13 @@ def test_batch_delete_versions_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.ArtifactRegistryRestInterceptor, "post_batch_delete_versions" + transports.ArtifactRegistryRestInterceptor, "post_create_rule" ) as post, mock.patch.object( - transports.ArtifactRegistryRestInterceptor, "pre_batch_delete_versions" + transports.ArtifactRegistryRestInterceptor, "pre_create_rule" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = version.BatchDeleteVersionsRequest.pb( - version.BatchDeleteVersionsRequest() - ) + pb_message = gda_rule.CreateRuleRequest.pb(gda_rule.CreateRuleRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -25524,18 +34977,18 @@ def test_batch_delete_versions_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 - return_value = json_format.MessageToJson(operations_pb2.Operation()) + return_value = gda_rule.Rule.to_json(gda_rule.Rule()) req.return_value.content = return_value - request = version.BatchDeleteVersionsRequest() + request = gda_rule.CreateRuleRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - 
post.return_value = operations_pb2.Operation() + post.return_value = gda_rule.Rule() - client.batch_delete_versions( + client.create_rule( request, metadata=[ ("key", "val"), @@ -25547,7 +35000,7 @@ def test_batch_delete_versions_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_files_rest_bad_request(request_type=file.ListFilesRequest): +def test_list_rules_rest_bad_request(request_type=rule.ListRulesRequest): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -25566,17 +35019,17 @@ def test_list_files_rest_bad_request(request_type=file.ListFilesRequest): response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value - client.list_files(request) + client.list_rules(request) @pytest.mark.parametrize( "request_type", [ - file.ListFilesRequest, + rule.ListRulesRequest, dict, ], ) -def test_list_files_rest_call_success(request_type): +def test_list_rules_rest_call_success(request_type): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -25588,7 +35041,7 @@ def test_list_files_rest_call_success(request_type): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = file.ListFilesResponse( + return_value = rule.ListRulesResponse( next_page_token="next_page_token_value", ) @@ -25597,19 +35050,19 @@ def test_list_files_rest_call_success(request_type): response_value.status_code = 200 # Convert return value to protobuf type - return_value = file.ListFilesResponse.pb(return_value) + return_value = rule.ListRulesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_files(request) + response = client.list_rules(request) # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListFilesPager) + assert isinstance(response, pagers.ListRulesPager) assert response.next_page_token == "next_page_token_value" @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_files_rest_interceptors(null_interceptor): +def test_list_rules_rest_interceptors(null_interceptor): transport = transports.ArtifactRegistryRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -25623,13 +35076,13 @@ def test_list_files_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ArtifactRegistryRestInterceptor, "post_list_files" + transports.ArtifactRegistryRestInterceptor, "post_list_rules" ) as post, mock.patch.object( - transports.ArtifactRegistryRestInterceptor, "pre_list_files" + transports.ArtifactRegistryRestInterceptor, "pre_list_rules" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = file.ListFilesRequest.pb(file.ListFilesRequest()) + pb_message = rule.ListRulesRequest.pb(rule.ListRulesRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -25639,18 +35092,18 @@ def test_list_files_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 
200 - return_value = file.ListFilesResponse.to_json(file.ListFilesResponse()) + return_value = rule.ListRulesResponse.to_json(rule.ListRulesResponse()) req.return_value.content = return_value - request = file.ListFilesRequest() + request = rule.ListRulesRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = file.ListFilesResponse() + post.return_value = rule.ListRulesResponse() - client.list_files( + client.list_rules( request, metadata=[ ("key", "val"), @@ -25662,13 +35115,13 @@ def test_list_files_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_file_rest_bad_request(request_type=file.GetFileRequest): +def test_get_rule_rest_bad_request(request_type=rule.GetRuleRequest): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/repositories/sample3/files/sample4" + "name": "projects/sample1/locations/sample2/repositories/sample3/rules/sample4" } request = request_type(**request_init) @@ -25683,34 +35136,35 @@ def test_get_file_rest_bad_request(request_type=file.GetFileRequest): response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value - client.get_file(request) + client.get_rule(request) @pytest.mark.parametrize( "request_type", [ - file.GetFileRequest, + rule.GetRuleRequest, dict, ], ) -def test_get_file_rest_call_success(request_type): +def test_get_rule_rest_call_success(request_type): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/repositories/sample3/files/sample4" + "name": "projects/sample1/locations/sample2/repositories/sample3/rules/sample4" } request = request_type(**request_init) # Mock the http 
request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = file.File( + return_value = rule.Rule( name="name_value", - size_bytes=1089, - owner="owner_value", + action=rule.Rule.Action.ALLOW, + operation=rule.Rule.Operation.DOWNLOAD, + package_id="package_id_value", ) # Wrap the value into a proper Response obj @@ -25718,21 +35172,22 @@ def test_get_file_rest_call_success(request_type): response_value.status_code = 200 # Convert return value to protobuf type - return_value = file.File.pb(return_value) + return_value = rule.Rule.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_file(request) + response = client.get_rule(request) # Establish that the response is the type that we expect. - assert isinstance(response, file.File) + assert isinstance(response, rule.Rule) assert response.name == "name_value" - assert response.size_bytes == 1089 - assert response.owner == "owner_value" + assert response.action == rule.Rule.Action.ALLOW + assert response.operation == rule.Rule.Operation.DOWNLOAD + assert response.package_id == "package_id_value" @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_file_rest_interceptors(null_interceptor): +def test_get_rule_rest_interceptors(null_interceptor): transport = transports.ArtifactRegistryRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -25746,13 +35201,13 @@ def test_get_file_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ArtifactRegistryRestInterceptor, "post_get_file" + transports.ArtifactRegistryRestInterceptor, "post_get_rule" ) as post, mock.patch.object( - 
transports.ArtifactRegistryRestInterceptor, "pre_get_file" + transports.ArtifactRegistryRestInterceptor, "pre_get_rule" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = file.GetFileRequest.pb(file.GetFileRequest()) + pb_message = rule.GetRuleRequest.pb(rule.GetRuleRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -25762,18 +35217,18 @@ def test_get_file_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 - return_value = file.File.to_json(file.File()) + return_value = rule.Rule.to_json(rule.Rule()) req.return_value.content = return_value - request = file.GetFileRequest() + request = rule.GetRuleRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = file.File() + post.return_value = rule.Rule() - client.get_file( + client.get_rule( request, metadata=[ ("key", "val"), @@ -25785,13 +35240,15 @@ def test_get_file_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_tags_rest_bad_request(request_type=tag.ListTagsRequest): +def test_update_rule_rest_bad_request(request_type=gda_rule.UpdateRuleRequest): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding request_init = { - "parent": "projects/sample1/locations/sample2/repositories/sample3/packages/sample4" + "rule": { + "name": "projects/sample1/locations/sample2/repositories/sample3/rules/sample4" + } } request = request_type(**request_init) @@ -25806,32 +35263,116 @@ def test_list_tags_rest_bad_request(request_type=tag.ListTagsRequest): response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value - client.list_tags(request) + client.update_rule(request) @pytest.mark.parametrize( "request_type", [ - tag.ListTagsRequest, + gda_rule.UpdateRuleRequest, dict, ], ) -def 
test_list_tags_rest_call_success(request_type): +def test_update_rule_rest_call_success(request_type): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding request_init = { - "parent": "projects/sample1/locations/sample2/repositories/sample3/packages/sample4" + "rule": { + "name": "projects/sample1/locations/sample2/repositories/sample3/rules/sample4" + } + } + request_init["rule"] = { + "name": "projects/sample1/locations/sample2/repositories/sample3/rules/sample4", + "action": 1, + "operation": 1, + "condition": { + "expression": "expression_value", + "title": "title_value", + "description": "description_value", + "location": "location_value", + }, + "package_id": "package_id_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = gda_rule.UpdateRuleRequest.meta.fields["rule"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["rule"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["rule"][field])): + del request_init["rule"][field][i][subfield] + else: + del 
request_init["rule"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = tag.ListTagsResponse( - next_page_token="next_page_token_value", + return_value = gda_rule.Rule( + name="name_value", + action=gda_rule.Rule.Action.ALLOW, + operation=gda_rule.Rule.Operation.DOWNLOAD, + package_id="package_id_value", ) # Wrap the value into a proper Response obj @@ -25839,19 +35380,22 @@ def test_list_tags_rest_call_success(request_type): response_value.status_code = 200 # Convert return value to protobuf type - return_value = tag.ListTagsResponse.pb(return_value) + return_value = gda_rule.Rule.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_tags(request) + response = client.update_rule(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListTagsPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, gda_rule.Rule) + assert response.name == "name_value" + assert response.action == gda_rule.Rule.Action.ALLOW + assert response.operation == gda_rule.Rule.Operation.DOWNLOAD + assert response.package_id == "package_id_value" @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_tags_rest_interceptors(null_interceptor): +def test_update_rule_rest_interceptors(null_interceptor): transport = transports.ArtifactRegistryRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -25865,13 +35409,13 @@ def test_list_tags_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ArtifactRegistryRestInterceptor, "post_list_tags" + transports.ArtifactRegistryRestInterceptor, "post_update_rule" ) as post, mock.patch.object( - transports.ArtifactRegistryRestInterceptor, "pre_list_tags" + transports.ArtifactRegistryRestInterceptor, "pre_update_rule" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = tag.ListTagsRequest.pb(tag.ListTagsRequest()) + pb_message = gda_rule.UpdateRuleRequest.pb(gda_rule.UpdateRuleRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -25881,18 +35425,18 @@ def test_list_tags_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 - return_value = tag.ListTagsResponse.to_json(tag.ListTagsResponse()) + return_value = gda_rule.Rule.to_json(gda_rule.Rule()) req.return_value.content = return_value - request = tag.ListTagsRequest() + request = gda_rule.UpdateRuleRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = tag.ListTagsResponse() + post.return_value = gda_rule.Rule() - client.list_tags( + client.update_rule( request, 
metadata=[ ("key", "val"), @@ -25904,13 +35448,13 @@ def test_list_tags_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_tag_rest_bad_request(request_type=tag.GetTagRequest): +def test_delete_rule_rest_bad_request(request_type=rule.DeleteRuleRequest): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/repositories/sample3/packages/sample4/tags/sample5" + "name": "projects/sample1/locations/sample2/repositories/sample3/rules/sample4" } request = request_type(**request_init) @@ -25925,54 +35469,46 @@ def test_get_tag_rest_bad_request(request_type=tag.GetTagRequest): response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value - client.get_tag(request) + client.delete_rule(request) @pytest.mark.parametrize( "request_type", [ - tag.GetTagRequest, + rule.DeleteRuleRequest, dict, ], ) -def test_get_tag_rest_call_success(request_type): +def test_delete_rule_rest_call_success(request_type): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/repositories/sample3/packages/sample4/tags/sample5" + "name": "projects/sample1/locations/sample2/repositories/sample3/rules/sample4" } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = tag.Tag( - name="name_value", - version="version_value", - ) + return_value = None # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = tag.Tag.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_tag(request) + response = client.delete_rule(request) # Establish that the response is the type that we expect. - assert isinstance(response, tag.Tag) - assert response.name == "name_value" - assert response.version == "version_value" + assert response is None @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_tag_rest_interceptors(null_interceptor): +def test_delete_rule_rest_interceptors(null_interceptor): transport = transports.ArtifactRegistryRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -25986,13 +35522,10 @@ def test_get_tag_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ArtifactRegistryRestInterceptor, "post_get_tag" - ) as post, mock.patch.object( - transports.ArtifactRegistryRestInterceptor, "pre_get_tag" + transports.ArtifactRegistryRestInterceptor, "pre_delete_rule" ) as pre: pre.assert_not_called() - post.assert_not_called() - pb_message = tag.GetTagRequest.pb(tag.GetTagRequest()) + pb_message = rule.DeleteRuleRequest.pb(rule.DeleteRuleRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -26002,18 +35535,15 @@ def test_get_tag_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 - return_value = tag.Tag.to_json(tag.Tag()) - req.return_value.content = return_value - request = tag.GetTagRequest() + request = rule.DeleteRuleRequest() metadata = 
[ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = tag.Tag() - client.get_tag( + client.delete_rule( request, metadata=[ ("key", "val"), @@ -26022,16 +35552,17 @@ def test_get_tag_rest_interceptors(null_interceptor): ) pre.assert_called_once() - post.assert_called_once() -def test_create_tag_rest_bad_request(request_type=gda_tag.CreateTagRequest): +def test_set_iam_policy_rest_bad_request( + request_type=iam_policy_pb2.SetIamPolicyRequest, +): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding request_init = { - "parent": "projects/sample1/locations/sample2/repositories/sample3/packages/sample4" + "resource": "projects/sample1/locations/sample2/repositories/sample3" } request = request_type(**request_init) @@ -26046,122 +35577,51 @@ def test_create_tag_rest_bad_request(request_type=gda_tag.CreateTagRequest): response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value - client.create_tag(request) + client.set_iam_policy(request) @pytest.mark.parametrize( "request_type", [ - gda_tag.CreateTagRequest, + iam_policy_pb2.SetIamPolicyRequest, dict, ], ) -def test_create_tag_rest_call_success(request_type): +def test_set_iam_policy_rest_call_success(request_type): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding request_init = { - "parent": "projects/sample1/locations/sample2/repositories/sample3/packages/sample4" + "resource": "projects/sample1/locations/sample2/repositories/sample3" } - request_init["tag"] = {"name": "name_value", "version": "version_value"} - # The version of a generated dependency at test runtime may differ from the version used during generation. 
- # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = gda_tag.CreateTagRequest.meta.fields["tag"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["tag"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - 
# Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["tag"][field])): - del request_init["tag"][field][i][subfield] - else: - del request_init["tag"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = gda_tag.Tag( - name="name_value", - version="version_value", + return_value = policy_pb2.Policy( + version=774, + etag=b"etag_blob", ) # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = gda_tag.Tag.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_tag(request) + response = client.set_iam_policy(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, gda_tag.Tag) - assert response.name == "name_value" - assert response.version == "version_value" + assert isinstance(response, policy_pb2.Policy) + assert response.version == 774 + assert response.etag == b"etag_blob" @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_tag_rest_interceptors(null_interceptor): +def test_set_iam_policy_rest_interceptors(null_interceptor): transport = transports.ArtifactRegistryRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -26175,13 +35635,13 @@ def test_create_tag_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ArtifactRegistryRestInterceptor, "post_create_tag" + transports.ArtifactRegistryRestInterceptor, "post_set_iam_policy" ) as post, mock.patch.object( - transports.ArtifactRegistryRestInterceptor, "pre_create_tag" + transports.ArtifactRegistryRestInterceptor, "pre_set_iam_policy" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = gda_tag.CreateTagRequest.pb(gda_tag.CreateTagRequest()) + pb_message = iam_policy_pb2.SetIamPolicyRequest() transcode.return_value = { "method": "post", "uri": "my_uri", @@ -26191,18 +35651,18 @@ def test_create_tag_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 - return_value = gda_tag.Tag.to_json(gda_tag.Tag()) + return_value = json_format.MessageToJson(policy_pb2.Policy()) req.return_value.content = return_value - request = gda_tag.CreateTagRequest() + request = iam_policy_pb2.SetIamPolicyRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = gda_tag.Tag() + post.return_value = policy_pb2.Policy() - client.create_tag( + client.set_iam_policy( request, metadata=[ ("key", "val"), @@ -26214,15 +35674,15 @@ def test_create_tag_rest_interceptors(null_interceptor): 
post.assert_called_once() -def test_update_tag_rest_bad_request(request_type=gda_tag.UpdateTagRequest): +def test_get_iam_policy_rest_bad_request( + request_type=iam_policy_pb2.GetIamPolicyRequest, +): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding request_init = { - "tag": { - "name": "projects/sample1/locations/sample2/repositories/sample3/packages/sample4/tags/sample5" - } + "resource": "projects/sample1/locations/sample2/repositories/sample3" } request = request_type(**request_init) @@ -26237,127 +35697,51 @@ def test_update_tag_rest_bad_request(request_type=gda_tag.UpdateTagRequest): response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value - client.update_tag(request) + client.get_iam_policy(request) @pytest.mark.parametrize( "request_type", [ - gda_tag.UpdateTagRequest, + iam_policy_pb2.GetIamPolicyRequest, dict, ], ) -def test_update_tag_rest_call_success(request_type): +def test_get_iam_policy_rest_call_success(request_type): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding request_init = { - "tag": { - "name": "projects/sample1/locations/sample2/repositories/sample3/packages/sample4/tags/sample5" - } - } - request_init["tag"] = { - "name": "projects/sample1/locations/sample2/repositories/sample3/packages/sample4/tags/sample5", - "version": "version_value", + "resource": "projects/sample1/locations/sample2/repositories/sample3" } - # The version of a generated dependency at test runtime may differ from the version used during generation. 
- # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = gda_tag.UpdateTagRequest.meta.fields["tag"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["tag"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - 
# Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["tag"][field])): - del request_init["tag"][field][i][subfield] - else: - del request_init["tag"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = gda_tag.Tag( - name="name_value", - version="version_value", + return_value = policy_pb2.Policy( + version=774, + etag=b"etag_blob", ) # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = gda_tag.Tag.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_tag(request) + response = client.get_iam_policy(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, gda_tag.Tag) - assert response.name == "name_value" - assert response.version == "version_value" + assert isinstance(response, policy_pb2.Policy) + assert response.version == 774 + assert response.etag == b"etag_blob" @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_tag_rest_interceptors(null_interceptor): +def test_get_iam_policy_rest_interceptors(null_interceptor): transport = transports.ArtifactRegistryRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -26371,13 +35755,13 @@ def test_update_tag_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ArtifactRegistryRestInterceptor, "post_update_tag" + transports.ArtifactRegistryRestInterceptor, "post_get_iam_policy" ) as post, mock.patch.object( - transports.ArtifactRegistryRestInterceptor, "pre_update_tag" + transports.ArtifactRegistryRestInterceptor, "pre_get_iam_policy" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = gda_tag.UpdateTagRequest.pb(gda_tag.UpdateTagRequest()) + pb_message = iam_policy_pb2.GetIamPolicyRequest() transcode.return_value = { "method": "post", "uri": "my_uri", @@ -26387,18 +35771,18 @@ def test_update_tag_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 - return_value = gda_tag.Tag.to_json(gda_tag.Tag()) + return_value = json_format.MessageToJson(policy_pb2.Policy()) req.return_value.content = return_value - request = gda_tag.UpdateTagRequest() + request = iam_policy_pb2.GetIamPolicyRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = gda_tag.Tag() + post.return_value = policy_pb2.Policy() - client.update_tag( + client.get_iam_policy( request, metadata=[ ("key", "val"), @@ -26410,13 +35794,15 @@ def test_update_tag_rest_interceptors(null_interceptor): 
post.assert_called_once() -def test_delete_tag_rest_bad_request(request_type=tag.DeleteTagRequest): +def test_test_iam_permissions_rest_bad_request( + request_type=iam_policy_pb2.TestIamPermissionsRequest, +): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/repositories/sample3/packages/sample4/tags/sample5" + "resource": "projects/sample1/locations/sample2/repositories/sample3" } request = request_type(**request_init) @@ -26431,46 +35817,49 @@ def test_delete_tag_rest_bad_request(request_type=tag.DeleteTagRequest): response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value - client.delete_tag(request) + client.test_iam_permissions(request) @pytest.mark.parametrize( "request_type", [ - tag.DeleteTagRequest, + iam_policy_pb2.TestIamPermissionsRequest, dict, ], ) -def test_delete_tag_rest_call_success(request_type): +def test_test_iam_permissions_rest_call_success(request_type): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/repositories/sample3/packages/sample4/tags/sample5" + "resource": "projects/sample1/locations/sample2/repositories/sample3" } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = None + return_value = iam_policy_pb2.TestIamPermissionsResponse( + permissions=["permissions_value"], + ) # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 - json_return_value = "" + json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_tag(request) + response = client.test_iam_permissions(request) # Establish that the response is the type that we expect. - assert response is None + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + assert response.permissions == ["permissions_value"] @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_tag_rest_interceptors(null_interceptor): +def test_test_iam_permissions_rest_interceptors(null_interceptor): transport = transports.ArtifactRegistryRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -26484,10 +35873,13 @@ def test_delete_tag_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ArtifactRegistryRestInterceptor, "pre_delete_tag" + transports.ArtifactRegistryRestInterceptor, "post_test_iam_permissions" + ) as post, mock.patch.object( + transports.ArtifactRegistryRestInterceptor, "pre_test_iam_permissions" ) as pre: pre.assert_not_called() - pb_message = tag.DeleteTagRequest.pb(tag.DeleteTagRequest()) + post.assert_not_called() + pb_message = iam_policy_pb2.TestIamPermissionsRequest() transcode.return_value = { "method": "post", "uri": "my_uri", @@ -26497,15 +35889,20 @@ def test_delete_tag_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + return_value = json_format.MessageToJson( + iam_policy_pb2.TestIamPermissionsResponse() + ) + req.return_value.content = return_value - request = tag.DeleteTagRequest() 
+ request = iam_policy_pb2.TestIamPermissionsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata + post.return_value = iam_policy_pb2.TestIamPermissionsResponse() - client.delete_tag( + client.test_iam_permissions( request, metadata=[ ("key", "val"), @@ -26514,18 +35911,17 @@ def test_delete_tag_rest_interceptors(null_interceptor): ) pre.assert_called_once() + post.assert_called_once() -def test_set_iam_policy_rest_bad_request( - request_type=iam_policy_pb2.SetIamPolicyRequest, +def test_get_project_settings_rest_bad_request( + request_type=settings.GetProjectSettingsRequest, ): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = { - "resource": "projects/sample1/locations/sample2/repositories/sample3" - } + request_init = {"name": "projects/sample1/projectSettings"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -26539,51 +35935,57 @@ def test_set_iam_policy_rest_bad_request( response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value - client.set_iam_policy(request) + client.get_project_settings(request) @pytest.mark.parametrize( "request_type", [ - iam_policy_pb2.SetIamPolicyRequest, + settings.GetProjectSettingsRequest, dict, ], ) -def test_set_iam_policy_rest_call_success(request_type): +def test_get_project_settings_rest_call_success(request_type): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = { - "resource": "projects/sample1/locations/sample2/repositories/sample3" - } + request_init = {"name": "projects/sample1/projectSettings"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = policy_pb2.Policy( - version=774, - etag=b"etag_blob", + return_value = settings.ProjectSettings( + name="name_value", + legacy_redirection_state=settings.ProjectSettings.RedirectionState.REDIRECTION_FROM_GCR_IO_DISABLED, + pull_percent=1293, ) # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = settings.ProjectSettings.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.set_iam_policy(request) + response = client.get_project_settings(request) # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - assert response.version == 774 - assert response.etag == b"etag_blob" + assert isinstance(response, settings.ProjectSettings) + assert response.name == "name_value" + assert ( + response.legacy_redirection_state + == settings.ProjectSettings.RedirectionState.REDIRECTION_FROM_GCR_IO_DISABLED + ) + assert response.pull_percent == 1293 @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_set_iam_policy_rest_interceptors(null_interceptor): +def test_get_project_settings_rest_interceptors(null_interceptor): transport = transports.ArtifactRegistryRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -26597,13 +35999,15 @@ def test_set_iam_policy_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ArtifactRegistryRestInterceptor, "post_set_iam_policy" + transports.ArtifactRegistryRestInterceptor, "post_get_project_settings" ) as post, mock.patch.object( - 
transports.ArtifactRegistryRestInterceptor, "pre_set_iam_policy" + transports.ArtifactRegistryRestInterceptor, "pre_get_project_settings" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = iam_policy_pb2.SetIamPolicyRequest() + pb_message = settings.GetProjectSettingsRequest.pb( + settings.GetProjectSettingsRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -26613,18 +36017,18 @@ def test_set_iam_policy_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 - return_value = json_format.MessageToJson(policy_pb2.Policy()) + return_value = settings.ProjectSettings.to_json(settings.ProjectSettings()) req.return_value.content = return_value - request = iam_policy_pb2.SetIamPolicyRequest() + request = settings.GetProjectSettingsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = policy_pb2.Policy() + post.return_value = settings.ProjectSettings() - client.set_iam_policy( + client.get_project_settings( request, metadata=[ ("key", "val"), @@ -26636,16 +36040,14 @@ def test_set_iam_policy_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_iam_policy_rest_bad_request( - request_type=iam_policy_pb2.GetIamPolicyRequest, +def test_update_project_settings_rest_bad_request( + request_type=settings.UpdateProjectSettingsRequest, ): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = { - "resource": "projects/sample1/locations/sample2/repositories/sample3" - } + request_init = {"project_settings": {"name": "projects/sample1/projectSettings"}} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -26659,51 +36061,129 @@ def test_get_iam_policy_rest_bad_request( response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value - client.get_iam_policy(request) + client.update_project_settings(request) @pytest.mark.parametrize( "request_type", [ - iam_policy_pb2.GetIamPolicyRequest, + settings.UpdateProjectSettingsRequest, dict, ], ) -def test_get_iam_policy_rest_call_success(request_type): +def test_update_project_settings_rest_call_success(request_type): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = { - "resource": "projects/sample1/locations/sample2/repositories/sample3" + request_init = {"project_settings": {"name": "projects/sample1/projectSettings"}} + request_init["project_settings"] = { + "name": "projects/sample1/projectSettings", + "legacy_redirection_state": 1, + "pull_percent": 1293, } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = settings.UpdateProjectSettingsRequest.meta.fields["project_settings"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["project_settings"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["project_settings"][field])): + del request_init["project_settings"][field][i][subfield] + 
else: + del request_init["project_settings"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = policy_pb2.Policy( - version=774, - etag=b"etag_blob", + return_value = settings.ProjectSettings( + name="name_value", + legacy_redirection_state=settings.ProjectSettings.RedirectionState.REDIRECTION_FROM_GCR_IO_DISABLED, + pull_percent=1293, ) # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = settings.ProjectSettings.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_iam_policy(request) + response = client.update_project_settings(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, policy_pb2.Policy) - assert response.version == 774 - assert response.etag == b"etag_blob" + assert isinstance(response, settings.ProjectSettings) + assert response.name == "name_value" + assert ( + response.legacy_redirection_state + == settings.ProjectSettings.RedirectionState.REDIRECTION_FROM_GCR_IO_DISABLED + ) + assert response.pull_percent == 1293 @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_iam_policy_rest_interceptors(null_interceptor): +def test_update_project_settings_rest_interceptors(null_interceptor): transport = transports.ArtifactRegistryRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -26717,13 +36197,15 @@ def test_get_iam_policy_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ArtifactRegistryRestInterceptor, "post_get_iam_policy" + transports.ArtifactRegistryRestInterceptor, "post_update_project_settings" ) as post, mock.patch.object( - transports.ArtifactRegistryRestInterceptor, "pre_get_iam_policy" + transports.ArtifactRegistryRestInterceptor, "pre_update_project_settings" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = iam_policy_pb2.GetIamPolicyRequest() + pb_message = settings.UpdateProjectSettingsRequest.pb( + settings.UpdateProjectSettingsRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -26733,18 +36215,18 @@ def test_get_iam_policy_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 - return_value = json_format.MessageToJson(policy_pb2.Policy()) + return_value = settings.ProjectSettings.to_json(settings.ProjectSettings()) req.return_value.content = return_value - request = iam_policy_pb2.GetIamPolicyRequest() + request = settings.UpdateProjectSettingsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, 
metadata - post.return_value = policy_pb2.Policy() + post.return_value = settings.ProjectSettings() - client.get_iam_policy( + client.update_project_settings( request, metadata=[ ("key", "val"), @@ -26756,16 +36238,14 @@ def test_get_iam_policy_rest_interceptors(null_interceptor): post.assert_called_once() -def test_test_iam_permissions_rest_bad_request( - request_type=iam_policy_pb2.TestIamPermissionsRequest, +def test_get_vpcsc_config_rest_bad_request( + request_type=vpcsc_config.GetVPCSCConfigRequest, ): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = { - "resource": "projects/sample1/locations/sample2/repositories/sample3" - } + request_init = {"name": "projects/sample1/locations/sample2/vpcscConfig"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -26779,49 +36259,52 @@ def test_test_iam_permissions_rest_bad_request( response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value - client.test_iam_permissions(request) + client.get_vpcsc_config(request) @pytest.mark.parametrize( "request_type", [ - iam_policy_pb2.TestIamPermissionsRequest, + vpcsc_config.GetVPCSCConfigRequest, dict, ], ) -def test_test_iam_permissions_rest_call_success(request_type): +def test_get_vpcsc_config_rest_call_success(request_type): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = { - "resource": "projects/sample1/locations/sample2/repositories/sample3" - } + request_init = {"name": "projects/sample1/locations/sample2/vpcscConfig"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = iam_policy_pb2.TestIamPermissionsResponse( - permissions=["permissions_value"], + return_value = vpcsc_config.VPCSCConfig( + name="name_value", + vpcsc_policy=vpcsc_config.VPCSCConfig.VPCSCPolicy.DENY, ) # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = vpcsc_config.VPCSCConfig.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.test_iam_permissions(request) + response = client.get_vpcsc_config(request) # Establish that the response is the type that we expect. - assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) - assert response.permissions == ["permissions_value"] + assert isinstance(response, vpcsc_config.VPCSCConfig) + assert response.name == "name_value" + assert response.vpcsc_policy == vpcsc_config.VPCSCConfig.VPCSCPolicy.DENY @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_test_iam_permissions_rest_interceptors(null_interceptor): +def test_get_vpcsc_config_rest_interceptors(null_interceptor): transport = transports.ArtifactRegistryRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -26835,13 +36318,15 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ArtifactRegistryRestInterceptor, "post_test_iam_permissions" + transports.ArtifactRegistryRestInterceptor, "post_get_vpcsc_config" ) as post, mock.patch.object( - transports.ArtifactRegistryRestInterceptor, "pre_test_iam_permissions" + transports.ArtifactRegistryRestInterceptor, "pre_get_vpcsc_config" ) as pre: 
pre.assert_not_called() post.assert_not_called() - pb_message = iam_policy_pb2.TestIamPermissionsRequest() + pb_message = vpcsc_config.GetVPCSCConfigRequest.pb( + vpcsc_config.GetVPCSCConfigRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -26851,20 +36336,18 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 - return_value = json_format.MessageToJson( - iam_policy_pb2.TestIamPermissionsResponse() - ) + return_value = vpcsc_config.VPCSCConfig.to_json(vpcsc_config.VPCSCConfig()) req.return_value.content = return_value - request = iam_policy_pb2.TestIamPermissionsRequest() + request = vpcsc_config.GetVPCSCConfigRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = iam_policy_pb2.TestIamPermissionsResponse() + post.return_value = vpcsc_config.VPCSCConfig() - client.test_iam_permissions( + client.get_vpcsc_config( request, metadata=[ ("key", "val"), @@ -26876,14 +36359,16 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_project_settings_rest_bad_request( - request_type=settings.GetProjectSettingsRequest, +def test_update_vpcsc_config_rest_bad_request( + request_type=gda_vpcsc_config.UpdateVPCSCConfigRequest, ): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/projectSettings"} + request_init = { + "vpcsc_config": {"name": "projects/sample1/locations/sample2/vpcscConfig"} + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -26897,31 +36382,104 @@ def test_get_project_settings_rest_bad_request( response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value - client.get_project_settings(request) + client.update_vpcsc_config(request) @pytest.mark.parametrize( "request_type", [ - settings.GetProjectSettingsRequest, + gda_vpcsc_config.UpdateVPCSCConfigRequest, dict, ], ) -def test_get_project_settings_rest_call_success(request_type): +def test_update_vpcsc_config_rest_call_success(request_type): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/projectSettings"} + request_init = { + "vpcsc_config": {"name": "projects/sample1/locations/sample2/vpcscConfig"} + } + request_init["vpcsc_config"] = { + "name": "projects/sample1/locations/sample2/vpcscConfig", + "vpcsc_policy": 1, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = gda_vpcsc_config.UpdateVPCSCConfigRequest.meta.fields["vpcsc_config"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["vpcsc_config"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["vpcsc_config"][field])): + del request_init["vpcsc_config"][field][i][subfield] + else: + del 
request_init["vpcsc_config"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = settings.ProjectSettings( + return_value = gda_vpcsc_config.VPCSCConfig( name="name_value", - legacy_redirection_state=settings.ProjectSettings.RedirectionState.REDIRECTION_FROM_GCR_IO_DISABLED, + vpcsc_policy=gda_vpcsc_config.VPCSCConfig.VPCSCPolicy.DENY, ) # Wrap the value into a proper Response obj @@ -26929,23 +36487,20 @@ def test_get_project_settings_rest_call_success(request_type): response_value.status_code = 200 # Convert return value to protobuf type - return_value = settings.ProjectSettings.pb(return_value) + return_value = gda_vpcsc_config.VPCSCConfig.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_project_settings(request) + response = client.update_vpcsc_config(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, settings.ProjectSettings) + assert isinstance(response, gda_vpcsc_config.VPCSCConfig) assert response.name == "name_value" - assert ( - response.legacy_redirection_state - == settings.ProjectSettings.RedirectionState.REDIRECTION_FROM_GCR_IO_DISABLED - ) + assert response.vpcsc_policy == gda_vpcsc_config.VPCSCConfig.VPCSCPolicy.DENY @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_project_settings_rest_interceptors(null_interceptor): +def test_update_vpcsc_config_rest_interceptors(null_interceptor): transport = transports.ArtifactRegistryRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -26959,14 +36514,14 @@ def test_get_project_settings_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ArtifactRegistryRestInterceptor, "post_get_project_settings" + transports.ArtifactRegistryRestInterceptor, "post_update_vpcsc_config" ) as post, mock.patch.object( - transports.ArtifactRegistryRestInterceptor, "pre_get_project_settings" + transports.ArtifactRegistryRestInterceptor, "pre_update_vpcsc_config" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = settings.GetProjectSettingsRequest.pb( - settings.GetProjectSettingsRequest() + pb_message = gda_vpcsc_config.UpdateVPCSCConfigRequest.pb( + gda_vpcsc_config.UpdateVPCSCConfigRequest() ) transcode.return_value = { "method": "post", @@ -26977,18 +36532,20 @@ def test_get_project_settings_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 - return_value = settings.ProjectSettings.to_json(settings.ProjectSettings()) + return_value = gda_vpcsc_config.VPCSCConfig.to_json( + gda_vpcsc_config.VPCSCConfig() + ) req.return_value.content = return_value - request = settings.GetProjectSettingsRequest() + request = gda_vpcsc_config.UpdateVPCSCConfigRequest() metadata = [ ("key", "val"), 
("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = settings.ProjectSettings() + post.return_value = gda_vpcsc_config.VPCSCConfig() - client.get_project_settings( + client.update_vpcsc_config( request, metadata=[ ("key", "val"), @@ -27000,14 +36557,16 @@ def test_get_project_settings_rest_interceptors(null_interceptor): post.assert_called_once() -def test_update_project_settings_rest_bad_request( - request_type=settings.UpdateProjectSettingsRequest, -): +def test_update_package_rest_bad_request(request_type=gda_package.UpdatePackageRequest): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"project_settings": {"name": "projects/sample1/projectSettings"}} + request_init = { + "package": { + "name": "projects/sample1/locations/sample2/repositories/sample3/packages/sample4" + } + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -27021,33 +36580,40 @@ def test_update_project_settings_rest_bad_request( response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value - client.update_project_settings(request) + client.update_package(request) @pytest.mark.parametrize( "request_type", [ - settings.UpdateProjectSettingsRequest, + gda_package.UpdatePackageRequest, dict, ], ) -def test_update_project_settings_rest_call_success(request_type): +def test_update_package_rest_call_success(request_type): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"project_settings": {"name": "projects/sample1/projectSettings"}} - request_init["project_settings"] = { - "name": "projects/sample1/projectSettings", - "legacy_redirection_state": 1, + request_init = { + "package": { + "name": "projects/sample1/locations/sample2/repositories/sample3/packages/sample4" + } + } + request_init["package"] = { + "name": "projects/sample1/locations/sample2/repositories/sample3/packages/sample4", + "display_name": "display_name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "annotations": {}, } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = settings.UpdateProjectSettingsRequest.meta.fields["project_settings"] + test_field = gda_package.UpdatePackageRequest.meta.fields["package"] def get_message_fields(field): # Given a field which is a message (composite type), return a list with @@ -27075,7 +36641,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["project_settings"].items(): # pragma: NO COVER + for field, value in request_init["package"].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -27105,18 +36671,18 @@ def get_message_fields(field): subfield = subfield_to_delete.get("subfield") if subfield: if field_repeated: - for i in range(0, len(request_init["project_settings"][field])): - del request_init["project_settings"][field][i][subfield] + for i in range(0, len(request_init["package"][field])): + del request_init["package"][field][i][subfield] else: - del request_init["project_settings"][field][subfield] + del request_init["package"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = settings.ProjectSettings( + return_value = gda_package.Package( name="name_value", - legacy_redirection_state=settings.ProjectSettings.RedirectionState.REDIRECTION_FROM_GCR_IO_DISABLED, + display_name="display_name_value", ) # Wrap the value into a proper Response obj @@ -27124,23 +36690,20 @@ def get_message_fields(field): response_value.status_code = 200 # Convert return value to protobuf type - return_value = settings.ProjectSettings.pb(return_value) + return_value = gda_package.Package.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_project_settings(request) + response = client.update_package(request) # Establish that the response is the type that we expect. - assert isinstance(response, settings.ProjectSettings) + assert isinstance(response, gda_package.Package) assert response.name == "name_value" - assert ( - response.legacy_redirection_state - == settings.ProjectSettings.RedirectionState.REDIRECTION_FROM_GCR_IO_DISABLED - ) + assert response.display_name == "display_name_value" @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_project_settings_rest_interceptors(null_interceptor): +def test_update_package_rest_interceptors(null_interceptor): transport = transports.ArtifactRegistryRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -27154,14 +36717,14 @@ def test_update_project_settings_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ArtifactRegistryRestInterceptor, "post_update_project_settings" + transports.ArtifactRegistryRestInterceptor, "post_update_package" ) as post, mock.patch.object( - transports.ArtifactRegistryRestInterceptor, "pre_update_project_settings" + transports.ArtifactRegistryRestInterceptor, "pre_update_package" ) as pre: 
pre.assert_not_called() post.assert_not_called() - pb_message = settings.UpdateProjectSettingsRequest.pb( - settings.UpdateProjectSettingsRequest() + pb_message = gda_package.UpdatePackageRequest.pb( + gda_package.UpdatePackageRequest() ) transcode.return_value = { "method": "post", @@ -27172,18 +36735,18 @@ def test_update_project_settings_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 - return_value = settings.ProjectSettings.to_json(settings.ProjectSettings()) + return_value = gda_package.Package.to_json(gda_package.Package()) req.return_value.content = return_value - request = settings.UpdateProjectSettingsRequest() + request = gda_package.UpdatePackageRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = settings.ProjectSettings() + post.return_value = gda_package.Package() - client.update_project_settings( + client.update_package( request, metadata=[ ("key", "val"), @@ -27195,14 +36758,14 @@ def test_update_project_settings_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_vpcsc_config_rest_bad_request( - request_type=vpcsc_config.GetVPCSCConfigRequest, +def test_list_attachments_rest_bad_request( + request_type=attachment.ListAttachmentsRequest, ): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/vpcscConfig"} + request_init = {"parent": "projects/sample1/locations/sample2/repositories/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -27216,31 +36779,30 @@ def test_get_vpcsc_config_rest_bad_request( response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value - client.get_vpcsc_config(request) + client.list_attachments(request) @pytest.mark.parametrize( "request_type", [ - vpcsc_config.GetVPCSCConfigRequest, + attachment.ListAttachmentsRequest, dict, ], ) -def test_get_vpcsc_config_rest_call_success(request_type): +def test_list_attachments_rest_call_success(request_type): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/vpcscConfig"} + request_init = {"parent": "projects/sample1/locations/sample2/repositories/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = vpcsc_config.VPCSCConfig( - name="name_value", - vpcsc_policy=vpcsc_config.VPCSCConfig.VPCSCPolicy.DENY, + return_value = attachment.ListAttachmentsResponse( + next_page_token="next_page_token_value", ) # Wrap the value into a proper Response obj @@ -27248,20 +36810,19 @@ def test_get_vpcsc_config_rest_call_success(request_type): response_value.status_code = 200 # Convert return value to protobuf type - return_value = vpcsc_config.VPCSCConfig.pb(return_value) + return_value = attachment.ListAttachmentsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_vpcsc_config(request) + response = client.list_attachments(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, vpcsc_config.VPCSCConfig) - assert response.name == "name_value" - assert response.vpcsc_policy == vpcsc_config.VPCSCConfig.VPCSCPolicy.DENY + assert isinstance(response, pagers.ListAttachmentsPager) + assert response.next_page_token == "next_page_token_value" @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_vpcsc_config_rest_interceptors(null_interceptor): +def test_list_attachments_rest_interceptors(null_interceptor): transport = transports.ArtifactRegistryRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -27275,14 +36836,14 @@ def test_get_vpcsc_config_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ArtifactRegistryRestInterceptor, "post_get_vpcsc_config" + transports.ArtifactRegistryRestInterceptor, "post_list_attachments" ) as post, mock.patch.object( - transports.ArtifactRegistryRestInterceptor, "pre_get_vpcsc_config" + transports.ArtifactRegistryRestInterceptor, "pre_list_attachments" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = vpcsc_config.GetVPCSCConfigRequest.pb( - vpcsc_config.GetVPCSCConfigRequest() + pb_message = attachment.ListAttachmentsRequest.pb( + attachment.ListAttachmentsRequest() ) transcode.return_value = { "method": "post", @@ -27293,18 +36854,20 @@ def test_get_vpcsc_config_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 - return_value = vpcsc_config.VPCSCConfig.to_json(vpcsc_config.VPCSCConfig()) + return_value = attachment.ListAttachmentsResponse.to_json( + attachment.ListAttachmentsResponse() + ) req.return_value.content = return_value - request = vpcsc_config.GetVPCSCConfigRequest() + request = attachment.ListAttachmentsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = vpcsc_config.VPCSCConfig() + 
post.return_value = attachment.ListAttachmentsResponse() - client.get_vpcsc_config( + client.list_attachments( request, metadata=[ ("key", "val"), @@ -27316,15 +36879,13 @@ def test_get_vpcsc_config_rest_interceptors(null_interceptor): post.assert_called_once() -def test_update_vpcsc_config_rest_bad_request( - request_type=gda_vpcsc_config.UpdateVPCSCConfigRequest, -): +def test_get_attachment_rest_bad_request(request_type=attachment.GetAttachmentRequest): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding request_init = { - "vpcsc_config": {"name": "projects/sample1/locations/sample2/vpcscConfig"} + "name": "projects/sample1/locations/sample2/repositories/sample3/attachments/sample4" } request = request_type(**request_init) @@ -27339,35 +36900,171 @@ def test_update_vpcsc_config_rest_bad_request( response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value - client.update_vpcsc_config(request) + client.get_attachment(request) @pytest.mark.parametrize( "request_type", [ - gda_vpcsc_config.UpdateVPCSCConfigRequest, + attachment.GetAttachmentRequest, dict, ], ) -def test_update_vpcsc_config_rest_call_success(request_type): +def test_get_attachment_rest_call_success(request_type): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding request_init = { - "vpcsc_config": {"name": "projects/sample1/locations/sample2/vpcscConfig"} + "name": "projects/sample1/locations/sample2/repositories/sample3/attachments/sample4" } - request_init["vpcsc_config"] = { - "name": "projects/sample1/locations/sample2/vpcscConfig", - "vpcsc_policy": 1, + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = attachment.Attachment( + name="name_value", + target="target_value", + type_="type__value", + attachment_namespace="attachment_namespace_value", + files=["files_value"], + oci_version_name="oci_version_name_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = attachment.Attachment.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_attachment(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, attachment.Attachment) + assert response.name == "name_value" + assert response.target == "target_value" + assert response.type_ == "type__value" + assert response.attachment_namespace == "attachment_namespace_value" + assert response.files == ["files_value"] + assert response.oci_version_name == "oci_version_name_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_attachment_rest_interceptors(null_interceptor): + transport = transports.ArtifactRegistryRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ArtifactRegistryRestInterceptor(), + ) + client = ArtifactRegistryClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ArtifactRegistryRestInterceptor, "post_get_attachment" + ) as post, mock.patch.object( + transports.ArtifactRegistryRestInterceptor, "pre_get_attachment" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + 
pb_message = attachment.GetAttachmentRequest.pb( + attachment.GetAttachmentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = attachment.Attachment.to_json(attachment.Attachment()) + req.return_value.content = return_value + + request = attachment.GetAttachmentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = attachment.Attachment() + + client.get_attachment( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_attachment_rest_bad_request( + request_type=gda_attachment.CreateAttachmentRequest, +): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/repositories/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.create_attachment(request) + + +@pytest.mark.parametrize( + "request_type", + [ + gda_attachment.CreateAttachmentRequest, + dict, + ], +) +def test_create_attachment_rest_call_success(request_type): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/repositories/sample3"} + request_init["attachment"] = { + "name": "name_value", + "target": "target_value", + "type_": "type__value", + "attachment_namespace": "attachment_namespace_value", + "annotations": {}, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "files": ["files_value1", "files_value2"], + "oci_version_name": "oci_version_name_value", } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = gda_vpcsc_config.UpdateVPCSCConfigRequest.meta.fields["vpcsc_config"] + test_field = gda_attachment.CreateAttachmentRequest.meta.fields["attachment"] def get_message_fields(field): # Given a field which is a message (composite type), return a list with @@ -27395,7 +37092,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["vpcsc_config"].items(): # pragma: NO COVER + for field, value in request_init["attachment"].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -27425,39 +37122,31 @@ def get_message_fields(field): subfield = subfield_to_delete.get("subfield") if subfield: if field_repeated: - for i in range(0, len(request_init["vpcsc_config"][field])): - del request_init["vpcsc_config"][field][i][subfield] + for i in range(0, len(request_init["attachment"][field])): + del request_init["attachment"][field][i][subfield] else: - del request_init["vpcsc_config"][field][subfield] + del request_init["attachment"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = gda_vpcsc_config.VPCSCConfig( - name="name_value", - vpcsc_policy=gda_vpcsc_config.VPCSCConfig.VPCSCPolicy.DENY, - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = gda_vpcsc_config.VPCSCConfig.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_vpcsc_config(request) + response = client.create_attachment(request) # Establish that the response is the type that we expect. - assert isinstance(response, gda_vpcsc_config.VPCSCConfig) - assert response.name == "name_value" - assert response.vpcsc_policy == gda_vpcsc_config.VPCSCConfig.VPCSCPolicy.DENY + json_return_value = json_format.MessageToJson(return_value) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_vpcsc_config_rest_interceptors(null_interceptor): +def test_create_attachment_rest_interceptors(null_interceptor): transport = transports.ArtifactRegistryRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -27471,14 +37160,16 @@ def test_update_vpcsc_config_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ArtifactRegistryRestInterceptor, "post_update_vpcsc_config" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.ArtifactRegistryRestInterceptor, "post_create_attachment" ) as post, mock.patch.object( - transports.ArtifactRegistryRestInterceptor, "pre_update_vpcsc_config" + transports.ArtifactRegistryRestInterceptor, "pre_create_attachment" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = gda_vpcsc_config.UpdateVPCSCConfigRequest.pb( - 
gda_vpcsc_config.UpdateVPCSCConfigRequest() + pb_message = gda_attachment.CreateAttachmentRequest.pb( + gda_attachment.CreateAttachmentRequest() ) transcode.return_value = { "method": "post", @@ -27489,20 +37180,137 @@ def test_update_vpcsc_config_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 - return_value = gda_vpcsc_config.VPCSCConfig.to_json( - gda_vpcsc_config.VPCSCConfig() + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = gda_attachment.CreateAttachmentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_attachment( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_attachment_rest_bad_request( + request_type=attachment.DeleteAttachmentRequest, +): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/repositories/sample3/attachments/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.delete_attachment(request) + + +@pytest.mark.parametrize( + "request_type", + [ + attachment.DeleteAttachmentRequest, + dict, + ], +) +def test_delete_attachment_rest_call_success(request_type): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/repositories/sample3/attachments/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_attachment(request) + + # Establish that the response is the type that we expect. 
+ json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_attachment_rest_interceptors(null_interceptor): + transport = transports.ArtifactRegistryRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ArtifactRegistryRestInterceptor(), + ) + client = ArtifactRegistryClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.ArtifactRegistryRestInterceptor, "post_delete_attachment" + ) as post, mock.patch.object( + transports.ArtifactRegistryRestInterceptor, "pre_delete_attachment" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = attachment.DeleteAttachmentRequest.pb( + attachment.DeleteAttachmentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.content = return_value - request = gda_vpcsc_config.UpdateVPCSCConfigRequest() + request = attachment.DeleteAttachmentRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = gda_vpcsc_config.VPCSCConfig() + post.return_value = operations_pb2.Operation() - client.update_vpcsc_config( + client.delete_attachment( request, metadata=[ ("key", "val"), @@ -28163,6 +37971,26 @@ def test_batch_delete_versions_empty_call_rest(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_update_version_empty_call_rest(): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.update_version), "__call__") as call: + client.update_version(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gda_version.UpdateVersionRequest() + + assert args[0] == request_msg + + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. def test_list_files_empty_call_rest(): @@ -28203,6 +38031,46 @@ def test_get_file_empty_call_rest(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_file_empty_call_rest(): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete_file), "__call__") as call: + client.delete_file(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = file.DeleteFileRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_file_empty_call_rest(): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.update_file), "__call__") as call: + client.update_file(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gda_file.UpdateFileRequest() + + assert args[0] == request_msg + + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. def test_list_tags_empty_call_rest(): @@ -28303,6 +38171,106 @@ def test_delete_tag_empty_call_rest(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_rule_empty_call_rest(): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.create_rule), "__call__") as call: + client.create_rule(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gda_rule.CreateRuleRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_rules_empty_call_rest(): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_rules), "__call__") as call: + client.list_rules(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = rule.ListRulesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_get_rule_empty_call_rest(): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_rule), "__call__") as call: + client.get_rule(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = rule.GetRuleRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_rule_empty_call_rest(): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.update_rule), "__call__") as call: + client.update_rule(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gda_rule.UpdateRuleRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_rule_empty_call_rest(): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete_rule), "__call__") as call: + client.delete_rule(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = rule.DeleteRuleRequest() + + assert args[0] == request_msg + + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
def test_set_iam_policy_empty_call_rest(): @@ -28451,6 +38419,110 @@ def test_update_vpcsc_config_empty_call_rest(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_package_empty_call_rest(): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.update_package), "__call__") as call: + client.update_package(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gda_package.UpdatePackageRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_attachments_empty_call_rest(): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_attachments), "__call__") as call: + client.list_attachments(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = attachment.ListAttachmentsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_attachment_empty_call_rest(): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object(type(client.transport.get_attachment), "__call__") as call: + client.get_attachment(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = attachment.GetAttachmentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_attachment_empty_call_rest(): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_attachment), "__call__" + ) as call: + client.create_attachment(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gda_attachment.CreateAttachmentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_attachment_empty_call_rest(): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_attachment), "__call__" + ) as call: + client.delete_attachment(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = attachment.DeleteAttachmentRequest() + + assert args[0] == request_msg + + def test_artifact_registry_rest_lro_client(): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), @@ -28523,13 +38595,21 @@ def test_artifact_registry_base_transport(): "get_version", "delete_version", "batch_delete_versions", + "update_version", "list_files", "get_file", + "delete_file", + "update_file", "list_tags", "get_tag", "create_tag", "update_tag", "delete_tag", + "create_rule", + "list_rules", + "get_rule", + "update_rule", + "delete_rule", "set_iam_policy", "get_iam_policy", "test_iam_permissions", @@ -28537,6 +38617,11 @@ def test_artifact_registry_base_transport(): "update_project_settings", "get_vpcsc_config", "update_vpcsc_config", + "update_package", + "list_attachments", + "get_attachment", + "create_attachment", + "delete_attachment", "get_location", "list_locations", "get_operation", @@ -28881,12 +38966,21 @@ def test_artifact_registry_client_transport_session_collision(transport_name): session1 = client1.transport.batch_delete_versions._session session2 = client2.transport.batch_delete_versions._session assert session1 != session2 + session1 = client1.transport.update_version._session + session2 = client2.transport.update_version._session + assert session1 != session2 session1 = client1.transport.list_files._session session2 = client2.transport.list_files._session assert session1 != session2 session1 = client1.transport.get_file._session session2 = client2.transport.get_file._session assert session1 != session2 + session1 = client1.transport.delete_file._session + session2 = client2.transport.delete_file._session + assert session1 != session2 + session1 = client1.transport.update_file._session + session2 = client2.transport.update_file._session + assert session1 != session2 session1 = client1.transport.list_tags._session session2 = client2.transport.list_tags._session 
assert session1 != session2 @@ -28902,6 +38996,21 @@ def test_artifact_registry_client_transport_session_collision(transport_name): session1 = client1.transport.delete_tag._session session2 = client2.transport.delete_tag._session assert session1 != session2 + session1 = client1.transport.create_rule._session + session2 = client2.transport.create_rule._session + assert session1 != session2 + session1 = client1.transport.list_rules._session + session2 = client2.transport.list_rules._session + assert session1 != session2 + session1 = client1.transport.get_rule._session + session2 = client2.transport.get_rule._session + assert session1 != session2 + session1 = client1.transport.update_rule._session + session2 = client2.transport.update_rule._session + assert session1 != session2 + session1 = client1.transport.delete_rule._session + session2 = client2.transport.delete_rule._session + assert session1 != session2 session1 = client1.transport.set_iam_policy._session session2 = client2.transport.set_iam_policy._session assert session1 != session2 @@ -28923,6 +39032,21 @@ def test_artifact_registry_client_transport_session_collision(transport_name): session1 = client1.transport.update_vpcsc_config._session session2 = client2.transport.update_vpcsc_config._session assert session1 != session2 + session1 = client1.transport.update_package._session + session2 = client2.transport.update_package._session + assert session1 != session2 + session1 = client1.transport.list_attachments._session + session2 = client2.transport.list_attachments._session + assert session1 != session2 + session1 = client1.transport.get_attachment._session + session2 = client2.transport.get_attachment._session + assert session1 != session2 + session1 = client1.transport.create_attachment._session + session2 = client2.transport.create_attachment._session + assert session1 != session2 + session1 = client1.transport.delete_attachment._session + session2 = client2.transport.delete_attachment._session + assert 
session1 != session2 def test_artifact_registry_grpc_transport_channel(): @@ -29116,11 +39240,42 @@ def test_parse_apt_artifact_path(): assert expected == actual -def test_docker_image_path(): +def test_attachment_path(): project = "winkle" location = "nautilus" repository = "scallop" - docker_image = "abalone" + attachment = "abalone" + expected = "projects/{project}/locations/{location}/repositories/{repository}/attachments/{attachment}".format( + project=project, + location=location, + repository=repository, + attachment=attachment, + ) + actual = ArtifactRegistryClient.attachment_path( + project, location, repository, attachment + ) + assert expected == actual + + +def test_parse_attachment_path(): + expected = { + "project": "squid", + "location": "clam", + "repository": "whelk", + "attachment": "octopus", + } + path = ArtifactRegistryClient.attachment_path(**expected) + + # Check that the path construction is reversible. + actual = ArtifactRegistryClient.parse_attachment_path(path) + assert expected == actual + + +def test_docker_image_path(): + project = "oyster" + location = "nudibranch" + repository = "cuttlefish" + docker_image = "mussel" expected = "projects/{project}/locations/{location}/repositories/{repository}/dockerImages/{docker_image}".format( project=project, location=location, @@ -29135,10 +39290,10 @@ def test_docker_image_path(): def test_parse_docker_image_path(): expected = { - "project": "squid", - "location": "clam", - "repository": "whelk", - "docker_image": "octopus", + "project": "winkle", + "location": "nautilus", + "repository": "scallop", + "docker_image": "abalone", } path = ArtifactRegistryClient.docker_image_path(**expected) @@ -29148,10 +39303,10 @@ def test_parse_docker_image_path(): def test_file_path(): - project = "oyster" - location = "nudibranch" - repository = "cuttlefish" - file = "mussel" + project = "squid" + location = "clam" + repository = "whelk" + file = "octopus" expected = 
"projects/{project}/locations/{location}/repositories/{repository}/files/{file}".format( project=project, location=location, @@ -29164,10 +39319,10 @@ def test_file_path(): def test_parse_file_path(): expected = { - "project": "winkle", - "location": "nautilus", - "repository": "scallop", - "file": "abalone", + "project": "oyster", + "location": "nudibranch", + "repository": "cuttlefish", + "file": "mussel", } path = ArtifactRegistryClient.file_path(**expected) @@ -29177,10 +39332,10 @@ def test_parse_file_path(): def test_maven_artifact_path(): - project = "squid" - location = "clam" - repository = "whelk" - maven_artifact = "octopus" + project = "winkle" + location = "nautilus" + repository = "scallop" + maven_artifact = "abalone" expected = "projects/{project}/locations/{location}/repositories/{repository}/mavenArtifacts/{maven_artifact}".format( project=project, location=location, @@ -29195,10 +39350,10 @@ def test_maven_artifact_path(): def test_parse_maven_artifact_path(): expected = { - "project": "oyster", - "location": "nudibranch", - "repository": "cuttlefish", - "maven_artifact": "mussel", + "project": "squid", + "location": "clam", + "repository": "whelk", + "maven_artifact": "octopus", } path = ArtifactRegistryClient.maven_artifact_path(**expected) @@ -29208,10 +39363,10 @@ def test_parse_maven_artifact_path(): def test_npm_package_path(): - project = "winkle" - location = "nautilus" - repository = "scallop" - npm_package = "abalone" + project = "oyster" + location = "nudibranch" + repository = "cuttlefish" + npm_package = "mussel" expected = "projects/{project}/locations/{location}/repositories/{repository}/npmPackages/{npm_package}".format( project=project, location=location, @@ -29226,10 +39381,10 @@ def test_npm_package_path(): def test_parse_npm_package_path(): expected = { - "project": "squid", - "location": "clam", - "repository": "whelk", - "npm_package": "octopus", + "project": "winkle", + "location": "nautilus", + "repository": "scallop", + 
"npm_package": "abalone", } path = ArtifactRegistryClient.npm_package_path(**expected) @@ -29239,10 +39394,10 @@ def test_parse_npm_package_path(): def test_package_path(): - project = "oyster" - location = "nudibranch" - repository = "cuttlefish" - package = "mussel" + project = "squid" + location = "clam" + repository = "whelk" + package = "octopus" expected = "projects/{project}/locations/{location}/repositories/{repository}/packages/{package}".format( project=project, location=location, @@ -29255,10 +39410,10 @@ def test_package_path(): def test_parse_package_path(): expected = { - "project": "winkle", - "location": "nautilus", - "repository": "scallop", - "package": "abalone", + "project": "oyster", + "location": "nudibranch", + "repository": "cuttlefish", + "package": "mussel", } path = ArtifactRegistryClient.package_path(**expected) @@ -29268,7 +39423,7 @@ def test_parse_package_path(): def test_project_settings_path(): - project = "squid" + project = "winkle" expected = "projects/{project}/projectSettings".format( project=project, ) @@ -29278,7 +39433,7 @@ def test_project_settings_path(): def test_parse_project_settings_path(): expected = { - "project": "clam", + "project": "nautilus", } path = ArtifactRegistryClient.project_settings_path(**expected) @@ -29288,10 +39443,10 @@ def test_parse_project_settings_path(): def test_python_package_path(): - project = "whelk" - location = "octopus" - repository = "oyster" - python_package = "nudibranch" + project = "scallop" + location = "abalone" + repository = "squid" + python_package = "clam" expected = "projects/{project}/locations/{location}/repositories/{repository}/pythonPackages/{python_package}".format( project=project, location=location, @@ -29306,10 +39461,10 @@ def test_python_package_path(): def test_parse_python_package_path(): expected = { - "project": "cuttlefish", - "location": "mussel", - "repository": "winkle", - "python_package": "nautilus", + "project": "whelk", + "location": "octopus", + 
"repository": "oyster", + "python_package": "nudibranch", } path = ArtifactRegistryClient.python_package_path(**expected) @@ -29319,9 +39474,9 @@ def test_parse_python_package_path(): def test_repository_path(): - project = "scallop" - location = "abalone" - repository = "squid" + project = "cuttlefish" + location = "mussel" + repository = "winkle" expected = ( "projects/{project}/locations/{location}/repositories/{repository}".format( project=project, @@ -29335,9 +39490,9 @@ def test_repository_path(): def test_parse_repository_path(): expected = { - "project": "clam", - "location": "whelk", - "repository": "octopus", + "project": "nautilus", + "location": "scallop", + "repository": "abalone", } path = ArtifactRegistryClient.repository_path(**expected) @@ -29346,10 +39501,39 @@ def test_parse_repository_path(): assert expected == actual +def test_rule_path(): + project = "squid" + location = "clam" + repository = "whelk" + rule = "octopus" + expected = "projects/{project}/locations/{location}/repositories/{repository}/rules/{rule}".format( + project=project, + location=location, + repository=repository, + rule=rule, + ) + actual = ArtifactRegistryClient.rule_path(project, location, repository, rule) + assert expected == actual + + +def test_parse_rule_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + "repository": "cuttlefish", + "rule": "mussel", + } + path = ArtifactRegistryClient.rule_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ArtifactRegistryClient.parse_rule_path(path) + assert expected == actual + + def test_secret_version_path(): - project = "oyster" - secret = "nudibranch" - secret_version = "cuttlefish" + project = "winkle" + secret = "nautilus" + secret_version = "scallop" expected = "projects/{project}/secrets/{secret}/versions/{secret_version}".format( project=project, secret=secret, @@ -29361,9 +39545,9 @@ def test_secret_version_path(): def test_parse_secret_version_path(): expected = { - "project": "mussel", - "secret": "winkle", - "secret_version": "nautilus", + "project": "abalone", + "secret": "squid", + "secret_version": "clam", } path = ArtifactRegistryClient.secret_version_path(**expected) @@ -29373,11 +39557,11 @@ def test_parse_secret_version_path(): def test_tag_path(): - project = "scallop" - location = "abalone" - repository = "squid" - package = "clam" - tag = "whelk" + project = "whelk" + location = "octopus" + repository = "oyster" + package = "nudibranch" + tag = "cuttlefish" expected = "projects/{project}/locations/{location}/repositories/{repository}/packages/{package}/tags/{tag}".format( project=project, location=location, @@ -29393,11 +39577,11 @@ def test_tag_path(): def test_parse_tag_path(): expected = { - "project": "octopus", - "location": "oyster", - "repository": "nudibranch", - "package": "cuttlefish", - "tag": "mussel", + "project": "mussel", + "location": "winkle", + "repository": "nautilus", + "package": "scallop", + "tag": "abalone", } path = ArtifactRegistryClient.tag_path(**expected) @@ -29407,11 +39591,11 @@ def test_parse_tag_path(): def test_version_path(): - project = "winkle" - location = "nautilus" - repository = "scallop" - package = "abalone" - version = "squid" + project = "squid" + location = "clam" + repository = "whelk" + package = "octopus" + version = "oyster" expected = "projects/{project}/locations/{location}/repositories/{repository}/packages/{package}/versions/{version}".format( project=project, location=location, 
@@ -29427,11 +39611,11 @@ def test_version_path(): def test_parse_version_path(): expected = { - "project": "clam", - "location": "whelk", - "repository": "octopus", - "package": "oyster", - "version": "nudibranch", + "project": "nudibranch", + "location": "cuttlefish", + "repository": "mussel", + "package": "winkle", + "version": "nautilus", } path = ArtifactRegistryClient.version_path(**expected) @@ -29441,8 +39625,8 @@ def test_parse_version_path(): def test_vpcsc_config_path(): - project = "cuttlefish" - location = "mussel" + project = "scallop" + location = "abalone" expected = "projects/{project}/locations/{location}/vpcscConfig".format( project=project, location=location, @@ -29453,8 +39637,8 @@ def test_vpcsc_config_path(): def test_parse_vpcsc_config_path(): expected = { - "project": "winkle", - "location": "nautilus", + "project": "squid", + "location": "clam", } path = ArtifactRegistryClient.vpcsc_config_path(**expected) @@ -29464,10 +39648,10 @@ def test_parse_vpcsc_config_path(): def test_yum_artifact_path(): - project = "scallop" - location = "abalone" - repository = "squid" - yum_artifact = "clam" + project = "whelk" + location = "octopus" + repository = "oyster" + yum_artifact = "nudibranch" expected = "projects/{project}/locations/{location}/repositories/{repository}/yumArtifacts/{yum_artifact}".format( project=project, location=location, @@ -29482,10 +39666,10 @@ def test_yum_artifact_path(): def test_parse_yum_artifact_path(): expected = { - "project": "whelk", - "location": "octopus", - "repository": "oyster", - "yum_artifact": "nudibranch", + "project": "cuttlefish", + "location": "mussel", + "repository": "winkle", + "yum_artifact": "nautilus", } path = ArtifactRegistryClient.yum_artifact_path(**expected) @@ -29495,7 +39679,7 @@ def test_parse_yum_artifact_path(): def test_common_billing_account_path(): - billing_account = "cuttlefish" + billing_account = "scallop" expected = "billingAccounts/{billing_account}".format( 
billing_account=billing_account, ) @@ -29505,7 +39689,7 @@ def test_common_billing_account_path(): def test_parse_common_billing_account_path(): expected = { - "billing_account": "mussel", + "billing_account": "abalone", } path = ArtifactRegistryClient.common_billing_account_path(**expected) @@ -29515,7 +39699,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): - folder = "winkle" + folder = "squid" expected = "folders/{folder}".format( folder=folder, ) @@ -29525,7 +39709,7 @@ def test_common_folder_path(): def test_parse_common_folder_path(): expected = { - "folder": "nautilus", + "folder": "clam", } path = ArtifactRegistryClient.common_folder_path(**expected) @@ -29535,7 +39719,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): - organization = "scallop" + organization = "whelk" expected = "organizations/{organization}".format( organization=organization, ) @@ -29545,7 +39729,7 @@ def test_common_organization_path(): def test_parse_common_organization_path(): expected = { - "organization": "abalone", + "organization": "octopus", } path = ArtifactRegistryClient.common_organization_path(**expected) @@ -29555,7 +39739,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): - project = "squid" + project = "oyster" expected = "projects/{project}".format( project=project, ) @@ -29565,7 +39749,7 @@ def test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "clam", + "project": "nudibranch", } path = ArtifactRegistryClient.common_project_path(**expected) @@ -29575,8 +39759,8 @@ def test_parse_common_project_path(): def test_common_location_path(): - project = "whelk" - location = "octopus" + project = "cuttlefish" + location = "mussel" expected = "projects/{project}/locations/{location}".format( project=project, location=location, @@ -29587,8 +39771,8 @@ def test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": 
"oyster", - "location": "nudibranch", + "project": "winkle", + "location": "nautilus", } path = ArtifactRegistryClient.common_location_path(**expected) diff --git a/packages/google-cloud-artifact-registry/tests/unit/gapic/artifactregistry_v1beta2/test_artifact_registry.py b/packages/google-cloud-artifact-registry/tests/unit/gapic/artifactregistry_v1beta2/test_artifact_registry.py index 164e14733131..049506cc9b08 100644 --- a/packages/google-cloud-artifact-registry/tests/unit/gapic/artifactregistry_v1beta2/test_artifact_registry.py +++ b/packages/google-cloud-artifact-registry/tests/unit/gapic/artifactregistry_v1beta2/test_artifact_registry.py @@ -344,86 +344,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (ArtifactRegistryClient, transports.ArtifactRegistryGrpcTransport, "grpc"), - (ArtifactRegistryClient, transports.ArtifactRegistryRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. 
- channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-asset/CHANGELOG.md b/packages/google-cloud-asset/CHANGELOG.md index 1fb8f229bbf6..a2b1971a5d85 100644 --- a/packages/google-cloud-asset/CHANGELOG.md +++ b/packages/google-cloud-asset/CHANGELOG.md @@ -4,6 +4,13 @@ [1]: https://pypi.org/project/google-cloud-asset/#history +## [3.27.1](https://github.com/googleapis/google-cloud-python/compare/google-cloud-asset-v3.27.0...google-cloud-asset-v3.27.1) (2024-10-31) + + +### Bug Fixes + +* disable universe-domain validation ([85c7512](https://github.com/googleapis/google-cloud-python/commit/85c7512bbdde2b9cc60b4ad42b8c36c4558a07a5)) + ## [3.27.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-asset-v3.26.4...google-cloud-asset-v3.27.0) (2024-10-24) diff --git a/packages/google-cloud-asset/google/cloud/asset/gapic_version.py b/packages/google-cloud-asset/google/cloud/asset/gapic_version.py index 3d01951c9fa8..7af2fa694463 100644 --- 
a/packages/google-cloud-asset/google/cloud/asset/gapic_version.py +++ b/packages/google-cloud-asset/google/cloud/asset/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.27.0" # {x-release-please-version} +__version__ = "3.27.1" # {x-release-please-version} diff --git a/packages/google-cloud-asset/google/cloud/asset_v1/gapic_version.py b/packages/google-cloud-asset/google/cloud/asset_v1/gapic_version.py index 3d01951c9fa8..7af2fa694463 100644 --- a/packages/google-cloud-asset/google/cloud/asset_v1/gapic_version.py +++ b/packages/google-cloud-asset/google/cloud/asset_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.27.0" # {x-release-please-version} +__version__ = "3.27.1" # {x-release-please-version} diff --git a/packages/google-cloud-asset/google/cloud/asset_v1/services/asset_service/client.py b/packages/google-cloud-asset/google/cloud/asset_v1/services/asset_service/client.py index 2b8df9993c3c..ab8f4a588fee 100644 --- a/packages/google-cloud-asset/google/cloud/asset_v1/services/asset_service/client.py +++ b/packages/google-cloud-asset/google/cloud/asset_v1/services/asset_service/client.py @@ -569,36 +569,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. 
- - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = AssetServiceClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -608,13 +578,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or AssetServiceClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-asset/google/cloud/asset_v1p1beta1/gapic_version.py b/packages/google-cloud-asset/google/cloud/asset_v1p1beta1/gapic_version.py index 3d01951c9fa8..7af2fa694463 100644 --- a/packages/google-cloud-asset/google/cloud/asset_v1p1beta1/gapic_version.py +++ b/packages/google-cloud-asset/google/cloud/asset_v1p1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "3.27.0" # {x-release-please-version} +__version__ = "3.27.1" # {x-release-please-version} diff --git a/packages/google-cloud-asset/google/cloud/asset_v1p1beta1/services/asset_service/client.py b/packages/google-cloud-asset/google/cloud/asset_v1p1beta1/services/asset_service/client.py index 2fcc58f03271..ef68719ec45e 100644 --- a/packages/google-cloud-asset/google/cloud/asset_v1p1beta1/services/asset_service/client.py +++ b/packages/google-cloud-asset/google/cloud/asset_v1p1beta1/services/asset_service/client.py @@ -439,36 +439,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = AssetServiceClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -478,13 +448,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. 
""" - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or AssetServiceClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-asset/google/cloud/asset_v1p2beta1/gapic_version.py b/packages/google-cloud-asset/google/cloud/asset_v1p2beta1/gapic_version.py index 3d01951c9fa8..7af2fa694463 100644 --- a/packages/google-cloud-asset/google/cloud/asset_v1p2beta1/gapic_version.py +++ b/packages/google-cloud-asset/google/cloud/asset_v1p2beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.27.0" # {x-release-please-version} +__version__ = "3.27.1" # {x-release-please-version} diff --git a/packages/google-cloud-asset/google/cloud/asset_v1p2beta1/services/asset_service/client.py b/packages/google-cloud-asset/google/cloud/asset_v1p2beta1/services/asset_service/client.py index e6ad044ef902..6a8d3d767b4a 100644 --- a/packages/google-cloud-asset/google/cloud/asset_v1p2beta1/services/asset_service/client.py +++ b/packages/google-cloud-asset/google/cloud/asset_v1p2beta1/services/asset_service/client.py @@ -455,36 +455,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. 
- - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = AssetServiceClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -494,13 +464,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or AssetServiceClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-asset/google/cloud/asset_v1p4beta1/gapic_version.py b/packages/google-cloud-asset/google/cloud/asset_v1p4beta1/gapic_version.py index f0856cadb731..41c4b1117b02 100644 --- a/packages/google-cloud-asset/google/cloud/asset_v1p4beta1/gapic_version.py +++ b/packages/google-cloud-asset/google/cloud/asset_v1p4beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "3.27.0" # {x-release-please-version} +__version__ = "3.27.1" # {x-release-please-version} diff --git a/packages/google-cloud-asset/google/cloud/asset_v1p5beta1/gapic_version.py b/packages/google-cloud-asset/google/cloud/asset_v1p5beta1/gapic_version.py index 3d01951c9fa8..7af2fa694463 100644 --- a/packages/google-cloud-asset/google/cloud/asset_v1p5beta1/gapic_version.py +++ b/packages/google-cloud-asset/google/cloud/asset_v1p5beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.27.0" # {x-release-please-version} +__version__ = "3.27.1" # {x-release-please-version} diff --git a/packages/google-cloud-asset/google/cloud/asset_v1p5beta1/services/asset_service/client.py b/packages/google-cloud-asset/google/cloud/asset_v1p5beta1/services/asset_service/client.py index 63c2052ad74d..ce6688ec0ca9 100644 --- a/packages/google-cloud-asset/google/cloud/asset_v1p5beta1/services/asset_service/client.py +++ b/packages/google-cloud-asset/google/cloud/asset_v1p5beta1/services/asset_service/client.py @@ -505,36 +505,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. 
- """ - - default_universe = AssetServiceClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -544,13 +514,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or AssetServiceClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. 
+ return True @property def api_endpoint(self): diff --git a/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1.json b/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1.json index c7fd85da945a..adaf8664d0c3 100644 --- a/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1.json +++ b/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-asset", - "version": "3.27.0" + "version": "3.27.1" }, "snippets": [ { diff --git a/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1p1beta1.json b/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1p1beta1.json index 32920338db60..2842e3347574 100644 --- a/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1p1beta1.json +++ b/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1p1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-asset", - "version": "3.27.0" + "version": "3.27.1" }, "snippets": [ { diff --git a/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1p2beta1.json b/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1p2beta1.json index 2201c8e0f90f..e542094192d6 100644 --- a/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1p2beta1.json +++ b/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1p2beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-asset", - "version": "3.27.0" + "version": "3.27.1" }, "snippets": [ { diff --git 
a/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1p5beta1.json b/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1p5beta1.json index 21a9be2abf2c..902ea38b5ad3 100644 --- a/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1p5beta1.json +++ b/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1p5beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-asset", - "version": "3.27.0" + "version": "3.27.1" }, "snippets": [ { diff --git a/packages/google-cloud-asset/tests/unit/gapic/asset_v1/test_asset_service.py b/packages/google-cloud-asset/tests/unit/gapic/asset_v1/test_asset_service.py index 917eb9774d49..1be19ffc7a4e 100644 --- a/packages/google-cloud-asset/tests/unit/gapic/asset_v1/test_asset_service.py +++ b/packages/google-cloud-asset/tests/unit/gapic/asset_v1/test_asset_service.py @@ -318,86 +318,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (AssetServiceClient, transports.AssetServiceGrpcTransport, "grpc"), - (AssetServiceClient, transports.AssetServiceRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. 
- channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-asset/tests/unit/gapic/asset_v1p1beta1/test_asset_service.py b/packages/google-cloud-asset/tests/unit/gapic/asset_v1p1beta1/test_asset_service.py index de10f93c61f2..186e045cb8ba 100644 --- a/packages/google-cloud-asset/tests/unit/gapic/asset_v1p1beta1/test_asset_service.py +++ b/packages/google-cloud-asset/tests/unit/gapic/asset_v1p1beta1/test_asset_service.py @@ -304,86 +304,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (AssetServiceClient, transports.AssetServiceGrpcTransport, "grpc"), - (AssetServiceClient, transports.AssetServiceRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. 
- assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. 
- # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-asset/tests/unit/gapic/asset_v1p2beta1/test_asset_service.py b/packages/google-cloud-asset/tests/unit/gapic/asset_v1p2beta1/test_asset_service.py index 45f5d5e13e95..b37fea78b51c 100644 --- a/packages/google-cloud-asset/tests/unit/gapic/asset_v1p2beta1/test_asset_service.py +++ b/packages/google-cloud-asset/tests/unit/gapic/asset_v1p2beta1/test_asset_service.py @@ -304,86 +304,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
-@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (AssetServiceClient, transports.AssetServiceGrpcTransport, "grpc"), - (AssetServiceClient, transports.AssetServiceRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. 
- google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
- ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-asset/tests/unit/gapic/asset_v1p5beta1/test_asset_service.py b/packages/google-cloud-asset/tests/unit/gapic/asset_v1p5beta1/test_asset_service.py index fb1bc8815620..016ff1d1040d 100644 --- a/packages/google-cloud-asset/tests/unit/gapic/asset_v1p5beta1/test_asset_service.py +++ b/packages/google-cloud-asset/tests/unit/gapic/asset_v1p5beta1/test_asset_service.py @@ -305,86 +305,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (AssetServiceClient, transports.AssetServiceGrpcTransport, "grpc"), - (AssetServiceClient, transports.AssetServiceRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. 
- channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-assured-workloads/CHANGELOG.md b/packages/google-cloud-assured-workloads/CHANGELOG.md index 94edf9a49077..f6763ed52eda 100644 --- a/packages/google-cloud-assured-workloads/CHANGELOG.md +++ b/packages/google-cloud-assured-workloads/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [1.13.1](https://github.com/googleapis/google-cloud-python/compare/google-cloud-assured-workloads-v1.13.0...google-cloud-assured-workloads-v1.13.1) (2024-10-31) + + +### Bug Fixes + +* disable universe-domain validation ([85c7512](https://github.com/googleapis/google-cloud-python/commit/85c7512bbdde2b9cc60b4ad42b8c36c4558a07a5)) + ## [1.13.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-assured-workloads-v1.12.5...google-cloud-assured-workloads-v1.13.0) (2024-10-24) diff --git a/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads/gapic_version.py 
b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads/gapic_version.py index 43155ded0db3..0b9427f4e8a5 100644 --- a/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads/gapic_version.py +++ b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.13.0" # {x-release-please-version} +__version__ = "1.13.1" # {x-release-please-version} diff --git a/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1/gapic_version.py b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1/gapic_version.py index 43155ded0db3..0b9427f4e8a5 100644 --- a/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1/gapic_version.py +++ b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.13.0" # {x-release-please-version} +__version__ = "1.13.1" # {x-release-please-version} diff --git a/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1/services/assured_workloads_service/client.py b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1/services/assured_workloads_service/client.py index 1bf67c0c68e7..9e181474c909 100644 --- a/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1/services/assured_workloads_service/client.py +++ b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1/services/assured_workloads_service/client.py @@ -493,36 +493,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = AssuredWorkloadsServiceClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. 
@@ -532,13 +502,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or AssuredWorkloadsServiceClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1beta1/gapic_version.py b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1beta1/gapic_version.py index 43155ded0db3..0b9427f4e8a5 100644 --- a/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1beta1/gapic_version.py +++ b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.13.0" # {x-release-please-version} +__version__ = "1.13.1" # {x-release-please-version} diff --git a/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/client.py b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/client.py index 1a3eec133330..49d2dac8051e 100644 --- a/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/client.py +++ b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/client.py @@ -471,36 +471,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = AssuredWorkloadsServiceClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. 
@@ -510,13 +480,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or AssuredWorkloadsServiceClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-assured-workloads/samples/generated_samples/snippet_metadata_google.cloud.assuredworkloads.v1.json b/packages/google-cloud-assured-workloads/samples/generated_samples/snippet_metadata_google.cloud.assuredworkloads.v1.json index 124869c8d53a..329203fab093 100644 --- a/packages/google-cloud-assured-workloads/samples/generated_samples/snippet_metadata_google.cloud.assuredworkloads.v1.json +++ b/packages/google-cloud-assured-workloads/samples/generated_samples/snippet_metadata_google.cloud.assuredworkloads.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-assured-workloads", - "version": "1.13.0" + "version": "1.13.1" }, "snippets": [ { diff --git a/packages/google-cloud-assured-workloads/samples/generated_samples/snippet_metadata_google.cloud.assuredworkloads.v1beta1.json b/packages/google-cloud-assured-workloads/samples/generated_samples/snippet_metadata_google.cloud.assuredworkloads.v1beta1.json index d457c182715b..f857c8897b0f 100644 --- a/packages/google-cloud-assured-workloads/samples/generated_samples/snippet_metadata_google.cloud.assuredworkloads.v1beta1.json +++ b/packages/google-cloud-assured-workloads/samples/generated_samples/snippet_metadata_google.cloud.assuredworkloads.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-assured-workloads", - "version": "1.13.0" + "version": "1.13.1" }, "snippets": [ { diff --git 
a/packages/google-cloud-assured-workloads/tests/unit/gapic/assuredworkloads_v1/test_assured_workloads_service.py b/packages/google-cloud-assured-workloads/tests/unit/gapic/assuredworkloads_v1/test_assured_workloads_service.py index 772b3ebd44e0..7ba335b1db84 100644 --- a/packages/google-cloud-assured-workloads/tests/unit/gapic/assuredworkloads_v1/test_assured_workloads_service.py +++ b/packages/google-cloud-assured-workloads/tests/unit/gapic/assuredworkloads_v1/test_assured_workloads_service.py @@ -348,94 +348,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - ( - AssuredWorkloadsServiceClient, - transports.AssuredWorkloadsServiceGrpcTransport, - "grpc", - ), - ( - AssuredWorkloadsServiceClient, - transports.AssuredWorkloadsServiceRestTransport, - "rest", - ), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. 
- channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-assured-workloads/tests/unit/gapic/assuredworkloads_v1beta1/test_assured_workloads_service.py b/packages/google-cloud-assured-workloads/tests/unit/gapic/assuredworkloads_v1beta1/test_assured_workloads_service.py index 0f073e6af917..94166e9a5540 100644 --- a/packages/google-cloud-assured-workloads/tests/unit/gapic/assuredworkloads_v1beta1/test_assured_workloads_service.py +++ b/packages/google-cloud-assured-workloads/tests/unit/gapic/assuredworkloads_v1beta1/test_assured_workloads_service.py @@ -348,94 +348,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
-@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - ( - AssuredWorkloadsServiceClient, - transports.AssuredWorkloadsServiceGrpcTransport, - "grpc", - ), - ( - AssuredWorkloadsServiceClient, - transports.AssuredWorkloadsServiceRestTransport, - "rest", - ), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. 
- google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
- ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-automl/CHANGELOG.md b/packages/google-cloud-automl/CHANGELOG.md index 2d187a609841..419595e14315 100644 --- a/packages/google-cloud-automl/CHANGELOG.md +++ b/packages/google-cloud-automl/CHANGELOG.md @@ -4,6 +4,13 @@ [1]: https://pypi.org/project/google-cloud-automl/#history +## [2.14.1](https://github.com/googleapis/google-cloud-python/compare/google-cloud-automl-v2.14.0...google-cloud-automl-v2.14.1) (2024-10-31) + + +### Bug Fixes + +* disable universe-domain validation ([4ed4108](https://github.com/googleapis/google-cloud-python/commit/4ed41088ab3cbadfe4de7fa170f172666015ed24)) + ## [2.14.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-automl-v2.13.5...google-cloud-automl-v2.14.0) (2024-10-24) diff --git a/packages/google-cloud-automl/google/cloud/automl/gapic_version.py b/packages/google-cloud-automl/google/cloud/automl/gapic_version.py index 773005b00ca9..2523dfbe9e23 100644 --- a/packages/google-cloud-automl/google/cloud/automl/gapic_version.py +++ b/packages/google-cloud-automl/google/cloud/automl/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "2.14.0" # {x-release-please-version} +__version__ = "2.14.1" # {x-release-please-version} diff --git a/packages/google-cloud-automl/google/cloud/automl_v1/gapic_version.py b/packages/google-cloud-automl/google/cloud/automl_v1/gapic_version.py index 773005b00ca9..2523dfbe9e23 100644 --- a/packages/google-cloud-automl/google/cloud/automl_v1/gapic_version.py +++ b/packages/google-cloud-automl/google/cloud/automl_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.14.0" # {x-release-please-version} +__version__ = "2.14.1" # {x-release-please-version} diff --git a/packages/google-cloud-automl/google/cloud/automl_v1/services/auto_ml/client.py b/packages/google-cloud-automl/google/cloud/automl_v1/services/auto_ml/client.py index f75547d5642f..187d7c17a984 100644 --- a/packages/google-cloud-automl/google/cloud/automl_v1/services/auto_ml/client.py +++ b/packages/google-cloud-automl/google/cloud/automl_v1/services/auto_ml/client.py @@ -564,36 +564,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. 
- """ - - default_universe = AutoMlClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -603,13 +573,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or AutoMlClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-automl/google/cloud/automl_v1/services/prediction_service/client.py b/packages/google-cloud-automl/google/cloud/automl_v1/services/prediction_service/client.py index b8d837686197..ba4d427b59db 100644 --- a/packages/google-cloud-automl/google/cloud/automl_v1/services/prediction_service/client.py +++ b/packages/google-cloud-automl/google/cloud/automl_v1/services/prediction_service/client.py @@ -473,36 +473,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. 
- credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = PredictionServiceClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -512,13 +482,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or PredictionServiceClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-automl/google/cloud/automl_v1beta1/gapic_version.py b/packages/google-cloud-automl/google/cloud/automl_v1beta1/gapic_version.py index 773005b00ca9..2523dfbe9e23 100644 --- a/packages/google-cloud-automl/google/cloud/automl_v1beta1/gapic_version.py +++ b/packages/google-cloud-automl/google/cloud/automl_v1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "2.14.0" # {x-release-please-version} +__version__ = "2.14.1" # {x-release-please-version} diff --git a/packages/google-cloud-automl/google/cloud/automl_v1beta1/services/auto_ml/client.py b/packages/google-cloud-automl/google/cloud/automl_v1beta1/services/auto_ml/client.py index cb77a70222f4..06f15fd1ec6f 100644 --- a/packages/google-cloud-automl/google/cloud/automl_v1beta1/services/auto_ml/client.py +++ b/packages/google-cloud-automl/google/cloud/automl_v1beta1/services/auto_ml/client.py @@ -623,36 +623,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = AutoMlClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -662,13 +632,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. 
""" - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or AutoMlClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-automl/google/cloud/automl_v1beta1/services/prediction_service/client.py b/packages/google-cloud-automl/google/cloud/automl_v1beta1/services/prediction_service/client.py index b52f0c1c9c30..23ab0d0fe0ee 100644 --- a/packages/google-cloud-automl/google/cloud/automl_v1beta1/services/prediction_service/client.py +++ b/packages/google-cloud-automl/google/cloud/automl_v1beta1/services/prediction_service/client.py @@ -473,36 +473,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = PredictionServiceClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." 
- ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -512,13 +482,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or PredictionServiceClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-automl/samples/generated_samples/snippet_metadata_google.cloud.automl.v1.json b/packages/google-cloud-automl/samples/generated_samples/snippet_metadata_google.cloud.automl.v1.json index 2bd37e62c638..49126fa3a89d 100644 --- a/packages/google-cloud-automl/samples/generated_samples/snippet_metadata_google.cloud.automl.v1.json +++ b/packages/google-cloud-automl/samples/generated_samples/snippet_metadata_google.cloud.automl.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-automl", - "version": "2.14.0" + "version": "2.14.1" }, "snippets": [ { diff --git a/packages/google-cloud-automl/samples/generated_samples/snippet_metadata_google.cloud.automl.v1beta1.json b/packages/google-cloud-automl/samples/generated_samples/snippet_metadata_google.cloud.automl.v1beta1.json index fad8cd1411fa..a123a7bf2218 100644 --- a/packages/google-cloud-automl/samples/generated_samples/snippet_metadata_google.cloud.automl.v1beta1.json +++ b/packages/google-cloud-automl/samples/generated_samples/snippet_metadata_google.cloud.automl.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-automl", - "version": "2.14.0" + "version": "2.14.1" }, "snippets": [ { diff --git a/packages/google-cloud-automl/tests/unit/gapic/automl_v1/test_auto_ml.py b/packages/google-cloud-automl/tests/unit/gapic/automl_v1/test_auto_ml.py index 
8953f9f75560..b93fc9fb0fbc 100644 --- a/packages/google-cloud-automl/tests/unit/gapic/automl_v1/test_auto_ml.py +++ b/packages/google-cloud-automl/tests/unit/gapic/automl_v1/test_auto_ml.py @@ -309,86 +309,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (AutoMlClient, transports.AutoMlGrpcTransport, "grpc"), - (AutoMlClient, transports.AutoMlRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. 
- google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
- ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-automl/tests/unit/gapic/automl_v1/test_prediction_service.py b/packages/google-cloud-automl/tests/unit/gapic/automl_v1/test_prediction_service.py index eb0968b4e1cf..619c77d51854 100644 --- a/packages/google-cloud-automl/tests/unit/gapic/automl_v1/test_prediction_service.py +++ b/packages/google-cloud-automl/tests/unit/gapic/automl_v1/test_prediction_service.py @@ -344,86 +344,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (PredictionServiceClient, transports.PredictionServiceGrpcTransport, "grpc"), - (PredictionServiceClient, transports.PredictionServiceRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. 
- channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-automl/tests/unit/gapic/automl_v1beta1/test_auto_ml.py b/packages/google-cloud-automl/tests/unit/gapic/automl_v1beta1/test_auto_ml.py index 1f591ddbef56..bf1e2924313b 100644 --- a/packages/google-cloud-automl/tests/unit/gapic/automl_v1beta1/test_auto_ml.py +++ b/packages/google-cloud-automl/tests/unit/gapic/automl_v1beta1/test_auto_ml.py @@ -319,86 +319,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (AutoMlClient, transports.AutoMlGrpcTransport, "grpc"), - (AutoMlClient, transports.AutoMlRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. 
- assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. 
- # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-automl/tests/unit/gapic/automl_v1beta1/test_prediction_service.py b/packages/google-cloud-automl/tests/unit/gapic/automl_v1beta1/test_prediction_service.py index 5ba19a19143b..16fc61240b14 100644 --- a/packages/google-cloud-automl/tests/unit/gapic/automl_v1beta1/test_prediction_service.py +++ b/packages/google-cloud-automl/tests/unit/gapic/automl_v1beta1/test_prediction_service.py @@ -345,86 +345,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
-@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (PredictionServiceClient, transports.PredictionServiceGrpcTransport, "grpc"), - (PredictionServiceClient, transports.PredictionServiceRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. 
- google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
- ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-backupdr/CHANGELOG.md b/packages/google-cloud-backupdr/CHANGELOG.md index 51f47d25fb7b..756c19694ada 100644 --- a/packages/google-cloud-backupdr/CHANGELOG.md +++ b/packages/google-cloud-backupdr/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [0.1.6](https://github.com/googleapis/google-cloud-python/compare/google-cloud-backupdr-v0.1.5...google-cloud-backupdr-v0.1.6) (2024-10-31) + + +### Bug Fixes + +* disable universe-domain validation ([4ed4108](https://github.com/googleapis/google-cloud-python/commit/4ed41088ab3cbadfe4de7fa170f172666015ed24)) + ## [0.1.5](https://github.com/googleapis/google-cloud-python/compare/google-cloud-backupdr-v0.1.4...google-cloud-backupdr-v0.1.5) (2024-10-24) diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr/gapic_version.py b/packages/google-cloud-backupdr/google/cloud/backupdr/gapic_version.py index e9c4bb5650f3..51d2795b9d6b 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr/gapic_version.py +++ b/packages/google-cloud-backupdr/google/cloud/backupdr/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.1.5" # {x-release-please-version} +__version__ = "0.1.6" # {x-release-please-version} diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/gapic_version.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/gapic_version.py index e9c4bb5650f3..51d2795b9d6b 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/gapic_version.py +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.5" # {x-release-please-version} +__version__ = "0.1.6" # {x-release-please-version} diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/client.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/client.py index 62543a3803a5..cad43136427b 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/client.py +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/client.py @@ -598,36 +598,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. 
- """ - - default_universe = BackupDRClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -637,13 +607,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or BackupDRClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. 
+ return True @property def api_endpoint(self): diff --git a/packages/google-cloud-backupdr/samples/generated_samples/snippet_metadata_google.cloud.backupdr.v1.json b/packages/google-cloud-backupdr/samples/generated_samples/snippet_metadata_google.cloud.backupdr.v1.json index f1bc38934c4e..4164339aa8cf 100644 --- a/packages/google-cloud-backupdr/samples/generated_samples/snippet_metadata_google.cloud.backupdr.v1.json +++ b/packages/google-cloud-backupdr/samples/generated_samples/snippet_metadata_google.cloud.backupdr.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-backupdr", - "version": "0.1.5" + "version": "0.1.6" }, "snippets": [ { diff --git a/packages/google-cloud-backupdr/tests/unit/gapic/backupdr_v1/test_backup_dr.py b/packages/google-cloud-backupdr/tests/unit/gapic/backupdr_v1/test_backup_dr.py index 64bbae698abd..2a0ff4a24856 100644 --- a/packages/google-cloud-backupdr/tests/unit/gapic/backupdr_v1/test_backup_dr.py +++ b/packages/google-cloud-backupdr/tests/unit/gapic/backupdr_v1/test_backup_dr.py @@ -320,86 +320,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (BackupDRClient, transports.BackupDRGrpcTransport, "grpc"), - (BackupDRClient, transports.BackupDRRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. 
- channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-bare-metal-solution/CHANGELOG.md b/packages/google-cloud-bare-metal-solution/CHANGELOG.md index 6a0534bd333c..c676874a043b 100644 --- a/packages/google-cloud-bare-metal-solution/CHANGELOG.md +++ b/packages/google-cloud-bare-metal-solution/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [1.8.1](https://github.com/googleapis/google-cloud-python/compare/google-cloud-bare-metal-solution-v1.8.0...google-cloud-bare-metal-solution-v1.8.1) (2024-10-31) + + +### Bug Fixes + +* disable universe-domain validation ([4ed4108](https://github.com/googleapis/google-cloud-python/commit/4ed41088ab3cbadfe4de7fa170f172666015ed24)) + ## [1.8.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-bare-metal-solution-v1.7.5...google-cloud-bare-metal-solution-v1.8.0) (2024-10-24) diff --git a/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution/gapic_version.py 
b/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution/gapic_version.py index 4b114d153974..02874f69f4e5 100644 --- a/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution/gapic_version.py +++ b/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.8.0" # {x-release-please-version} +__version__ = "1.8.1" # {x-release-please-version} diff --git a/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/gapic_version.py b/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/gapic_version.py index 4b114d153974..02874f69f4e5 100644 --- a/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/gapic_version.py +++ b/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.8.0" # {x-release-please-version} +__version__ = "1.8.1" # {x-release-please-version} diff --git a/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/services/bare_metal_solution/client.py b/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/services/bare_metal_solution/client.py index c5896f656da2..60f10dbb0d74 100644 --- a/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/services/bare_metal_solution/client.py +++ b/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/services/bare_metal_solution/client.py @@ -830,36 +830,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = BareMetalSolutionClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. 
@@ -869,13 +839,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or BareMetalSolutionClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-bare-metal-solution/samples/generated_samples/snippet_metadata_google.cloud.baremetalsolution.v2.json b/packages/google-cloud-bare-metal-solution/samples/generated_samples/snippet_metadata_google.cloud.baremetalsolution.v2.json index 649bd3e5c000..a1ea0708b18d 100644 --- a/packages/google-cloud-bare-metal-solution/samples/generated_samples/snippet_metadata_google.cloud.baremetalsolution.v2.json +++ b/packages/google-cloud-bare-metal-solution/samples/generated_samples/snippet_metadata_google.cloud.baremetalsolution.v2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bare-metal-solution", - "version": "1.8.0" + "version": "1.8.1" }, "snippets": [ { diff --git a/packages/google-cloud-bare-metal-solution/tests/unit/gapic/bare_metal_solution_v2/test_bare_metal_solution.py b/packages/google-cloud-bare-metal-solution/tests/unit/gapic/bare_metal_solution_v2/test_bare_metal_solution.py index 5a3c21a2ed2c..08d3d0b5e4ed 100644 --- a/packages/google-cloud-bare-metal-solution/tests/unit/gapic/bare_metal_solution_v2/test_bare_metal_solution.py +++ b/packages/google-cloud-bare-metal-solution/tests/unit/gapic/bare_metal_solution_v2/test_bare_metal_solution.py @@ -360,86 +360,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
-@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (BareMetalSolutionClient, transports.BareMetalSolutionGrpcTransport, "grpc"), - (BareMetalSolutionClient, transports.BareMetalSolutionRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. 
- google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
- ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-batch/CHANGELOG.md b/packages/google-cloud-batch/CHANGELOG.md index 450b76079b16..b52788362d52 100644 --- a/packages/google-cloud-batch/CHANGELOG.md +++ b/packages/google-cloud-batch/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [0.17.31](https://github.com/googleapis/google-cloud-python/compare/google-cloud-batch-v0.17.30...google-cloud-batch-v0.17.31) (2024-10-31) + + +### Bug Fixes + +* disable universe-domain validation ([4ed4108](https://github.com/googleapis/google-cloud-python/commit/4ed41088ab3cbadfe4de7fa170f172666015ed24)) + ## [0.17.30](https://github.com/googleapis/google-cloud-python/compare/google-cloud-batch-v0.17.29...google-cloud-batch-v0.17.30) (2024-10-24) diff --git a/packages/google-cloud-batch/google/cloud/batch/gapic_version.py b/packages/google-cloud-batch/google/cloud/batch/gapic_version.py index 71eef09933f2..b6bd82a78613 100644 --- a/packages/google-cloud-batch/google/cloud/batch/gapic_version.py +++ b/packages/google-cloud-batch/google/cloud/batch/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.17.30" # {x-release-please-version} +__version__ = "0.17.31" # {x-release-please-version} diff --git a/packages/google-cloud-batch/google/cloud/batch_v1/gapic_version.py b/packages/google-cloud-batch/google/cloud/batch_v1/gapic_version.py index 71eef09933f2..b6bd82a78613 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1/gapic_version.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.17.30" # {x-release-please-version} +__version__ = "0.17.31" # {x-release-please-version} diff --git a/packages/google-cloud-batch/google/cloud/batch_v1/services/batch_service/client.py b/packages/google-cloud-batch/google/cloud/batch_v1/services/batch_service/client.py index aa5242f0130a..22a7a7de0cb6 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1/services/batch_service/client.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1/services/batch_service/client.py @@ -522,36 +522,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. 
- """ - - default_universe = BatchServiceClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -561,13 +531,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or BatchServiceClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-batch/google/cloud/batch_v1alpha/gapic_version.py b/packages/google-cloud-batch/google/cloud/batch_v1alpha/gapic_version.py index 71eef09933f2..b6bd82a78613 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1alpha/gapic_version.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1alpha/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.17.30" # {x-release-please-version} +__version__ = "0.17.31" # {x-release-please-version} diff --git a/packages/google-cloud-batch/google/cloud/batch_v1alpha/services/batch_service/client.py b/packages/google-cloud-batch/google/cloud/batch_v1alpha/services/batch_service/client.py index 81bc50a2c342..b8db242b68b7 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1alpha/services/batch_service/client.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1alpha/services/batch_service/client.py @@ -550,36 +550,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = BatchServiceClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -589,13 +559,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. 
""" - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or BatchServiceClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1.json b/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1.json index cff9c2aa0e65..ee4fbd362887 100644 --- a/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1.json +++ b/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-batch", - "version": "0.17.30" + "version": "0.17.31" }, "snippets": [ { diff --git a/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1alpha.json b/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1alpha.json index f2488f4d433e..f52304810a06 100644 --- a/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1alpha.json +++ b/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1alpha.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-batch", - "version": "0.17.30" + "version": "0.17.31" }, "snippets": [ { diff --git a/packages/google-cloud-batch/tests/unit/gapic/batch_v1/test_batch_service.py b/packages/google-cloud-batch/tests/unit/gapic/batch_v1/test_batch_service.py index 288cdbdc16e4..6f372d5128c2 100644 --- a/packages/google-cloud-batch/tests/unit/gapic/batch_v1/test_batch_service.py +++ b/packages/google-cloud-batch/tests/unit/gapic/batch_v1/test_batch_service.py @@ -320,86 +320,6 @@ def test__get_universe_domain(): assert 
str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (BatchServiceClient, transports.BatchServiceGrpcTransport, "grpc"), - (BatchServiceClient, transports.BatchServiceRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. 
- google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
- ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-batch/tests/unit/gapic/batch_v1alpha/test_batch_service.py b/packages/google-cloud-batch/tests/unit/gapic/batch_v1alpha/test_batch_service.py index 346750b7a162..d978de4f7fba 100644 --- a/packages/google-cloud-batch/tests/unit/gapic/batch_v1alpha/test_batch_service.py +++ b/packages/google-cloud-batch/tests/unit/gapic/batch_v1alpha/test_batch_service.py @@ -327,86 +327,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (BatchServiceClient, transports.BatchServiceGrpcTransport, "grpc"), - (BatchServiceClient, transports.BatchServiceRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. 
- channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-beyondcorp-appconnections/CHANGELOG.md b/packages/google-cloud-beyondcorp-appconnections/CHANGELOG.md index 1ecda6b2271b..4a1308ce8490 100644 --- a/packages/google-cloud-beyondcorp-appconnections/CHANGELOG.md +++ b/packages/google-cloud-beyondcorp-appconnections/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [0.4.13](https://github.com/googleapis/google-cloud-python/compare/google-cloud-beyondcorp-appconnections-v0.4.12...google-cloud-beyondcorp-appconnections-v0.4.13) (2024-10-31) + + +### Bug Fixes + +* disable universe-domain validation ([4ed4108](https://github.com/googleapis/google-cloud-python/commit/4ed41088ab3cbadfe4de7fa170f172666015ed24)) + ## [0.4.12](https://github.com/googleapis/google-cloud-python/compare/google-cloud-beyondcorp-appconnections-v0.4.11...google-cloud-beyondcorp-appconnections-v0.4.12) (2024-10-24) diff --git a/packages/google-cloud-beyondcorp-appconnections/google/cloud/beyondcorp_appconnections/gapic_version.py 
b/packages/google-cloud-beyondcorp-appconnections/google/cloud/beyondcorp_appconnections/gapic_version.py index db2ef16a95a4..9b19e5f10e00 100644 --- a/packages/google-cloud-beyondcorp-appconnections/google/cloud/beyondcorp_appconnections/gapic_version.py +++ b/packages/google-cloud-beyondcorp-appconnections/google/cloud/beyondcorp_appconnections/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.4.12" # {x-release-please-version} +__version__ = "0.4.13" # {x-release-please-version} diff --git a/packages/google-cloud-beyondcorp-appconnections/google/cloud/beyondcorp_appconnections_v1/gapic_version.py b/packages/google-cloud-beyondcorp-appconnections/google/cloud/beyondcorp_appconnections_v1/gapic_version.py index db2ef16a95a4..9b19e5f10e00 100644 --- a/packages/google-cloud-beyondcorp-appconnections/google/cloud/beyondcorp_appconnections_v1/gapic_version.py +++ b/packages/google-cloud-beyondcorp-appconnections/google/cloud/beyondcorp_appconnections_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.4.12" # {x-release-please-version} +__version__ = "0.4.13" # {x-release-please-version} diff --git a/packages/google-cloud-beyondcorp-appconnections/google/cloud/beyondcorp_appconnections_v1/services/app_connections_service/client.py b/packages/google-cloud-beyondcorp-appconnections/google/cloud/beyondcorp_appconnections_v1/services/app_connections_service/client.py index afa938f3fc58..ffdcb843d0ff 100644 --- a/packages/google-cloud-beyondcorp-appconnections/google/cloud/beyondcorp_appconnections_v1/services/app_connections_service/client.py +++ b/packages/google-cloud-beyondcorp-appconnections/google/cloud/beyondcorp_appconnections_v1/services/app_connections_service/client.py @@ -535,36 +535,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = AppConnectionsServiceClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." 
- ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -574,13 +544,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or AppConnectionsServiceClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-beyondcorp-appconnections/samples/generated_samples/snippet_metadata_google.cloud.beyondcorp.appconnections.v1.json b/packages/google-cloud-beyondcorp-appconnections/samples/generated_samples/snippet_metadata_google.cloud.beyondcorp.appconnections.v1.json index 74f340dcda3d..2409ab52fa34 100644 --- a/packages/google-cloud-beyondcorp-appconnections/samples/generated_samples/snippet_metadata_google.cloud.beyondcorp.appconnections.v1.json +++ b/packages/google-cloud-beyondcorp-appconnections/samples/generated_samples/snippet_metadata_google.cloud.beyondcorp.appconnections.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-beyondcorp-appconnections", - "version": "0.4.12" + "version": "0.4.13" }, "snippets": [ { diff --git a/packages/google-cloud-beyondcorp-appconnections/tests/unit/gapic/beyondcorp_appconnections_v1/test_app_connections_service.py b/packages/google-cloud-beyondcorp-appconnections/tests/unit/gapic/beyondcorp_appconnections_v1/test_app_connections_service.py index 90ad61c89371..94266beb90a2 100644 --- a/packages/google-cloud-beyondcorp-appconnections/tests/unit/gapic/beyondcorp_appconnections_v1/test_app_connections_service.py +++ b/packages/google-cloud-beyondcorp-appconnections/tests/unit/gapic/beyondcorp_appconnections_v1/test_app_connections_service.py @@ -352,94 +352,6 @@ def 
test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - ( - AppConnectionsServiceClient, - transports.AppConnectionsServiceGrpcTransport, - "grpc", - ), - ( - AppConnectionsServiceClient, - transports.AppConnectionsServiceRestTransport, - "rest", - ), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. 
- google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
- ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-beyondcorp-appconnectors/CHANGELOG.md b/packages/google-cloud-beyondcorp-appconnectors/CHANGELOG.md index 0d279593f877..2eb8e5d3c6f8 100644 --- a/packages/google-cloud-beyondcorp-appconnectors/CHANGELOG.md +++ b/packages/google-cloud-beyondcorp-appconnectors/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [0.4.13](https://github.com/googleapis/google-cloud-python/compare/google-cloud-beyondcorp-appconnectors-v0.4.12...google-cloud-beyondcorp-appconnectors-v0.4.13) (2024-10-31) + + +### Bug Fixes + +* disable universe-domain validation ([4ed4108](https://github.com/googleapis/google-cloud-python/commit/4ed41088ab3cbadfe4de7fa170f172666015ed24)) + ## [0.4.12](https://github.com/googleapis/google-cloud-python/compare/google-cloud-beyondcorp-appconnectors-v0.4.11...google-cloud-beyondcorp-appconnectors-v0.4.12) (2024-10-24) diff --git a/packages/google-cloud-beyondcorp-appconnectors/google/cloud/beyondcorp_appconnectors/gapic_version.py b/packages/google-cloud-beyondcorp-appconnectors/google/cloud/beyondcorp_appconnectors/gapic_version.py index db2ef16a95a4..9b19e5f10e00 100644 --- a/packages/google-cloud-beyondcorp-appconnectors/google/cloud/beyondcorp_appconnectors/gapic_version.py +++ b/packages/google-cloud-beyondcorp-appconnectors/google/cloud/beyondcorp_appconnectors/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.4.12" # {x-release-please-version} +__version__ = "0.4.13" # {x-release-please-version} diff --git a/packages/google-cloud-beyondcorp-appconnectors/google/cloud/beyondcorp_appconnectors_v1/gapic_version.py b/packages/google-cloud-beyondcorp-appconnectors/google/cloud/beyondcorp_appconnectors_v1/gapic_version.py index db2ef16a95a4..9b19e5f10e00 100644 --- a/packages/google-cloud-beyondcorp-appconnectors/google/cloud/beyondcorp_appconnectors_v1/gapic_version.py +++ b/packages/google-cloud-beyondcorp-appconnectors/google/cloud/beyondcorp_appconnectors_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.4.12" # {x-release-please-version} +__version__ = "0.4.13" # {x-release-please-version} diff --git a/packages/google-cloud-beyondcorp-appconnectors/google/cloud/beyondcorp_appconnectors_v1/services/app_connectors_service/client.py b/packages/google-cloud-beyondcorp-appconnectors/google/cloud/beyondcorp_appconnectors_v1/services/app_connectors_service/client.py index a73843d7ad5b..58ab8fef2955 100644 --- a/packages/google-cloud-beyondcorp-appconnectors/google/cloud/beyondcorp_appconnectors_v1/services/app_connectors_service/client.py +++ b/packages/google-cloud-beyondcorp-appconnectors/google/cloud/beyondcorp_appconnectors_v1/services/app_connectors_service/client.py @@ -491,36 +491,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. 
- - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = AppConnectorsServiceClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -530,13 +500,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or AppConnectorsServiceClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. 
+ return True @property def api_endpoint(self): diff --git a/packages/google-cloud-beyondcorp-appconnectors/samples/generated_samples/snippet_metadata_google.cloud.beyondcorp.appconnectors.v1.json b/packages/google-cloud-beyondcorp-appconnectors/samples/generated_samples/snippet_metadata_google.cloud.beyondcorp.appconnectors.v1.json index f55f3a60f358..4c4360126238 100644 --- a/packages/google-cloud-beyondcorp-appconnectors/samples/generated_samples/snippet_metadata_google.cloud.beyondcorp.appconnectors.v1.json +++ b/packages/google-cloud-beyondcorp-appconnectors/samples/generated_samples/snippet_metadata_google.cloud.beyondcorp.appconnectors.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-beyondcorp-appconnectors", - "version": "0.4.12" + "version": "0.4.13" }, "snippets": [ { diff --git a/packages/google-cloud-beyondcorp-appconnectors/tests/unit/gapic/beyondcorp_appconnectors_v1/test_app_connectors_service.py b/packages/google-cloud-beyondcorp-appconnectors/tests/unit/gapic/beyondcorp_appconnectors_v1/test_app_connectors_service.py index 9230a4789b59..3813dd6c1eab 100644 --- a/packages/google-cloud-beyondcorp-appconnectors/tests/unit/gapic/beyondcorp_appconnectors_v1/test_app_connectors_service.py +++ b/packages/google-cloud-beyondcorp-appconnectors/tests/unit/gapic/beyondcorp_appconnectors_v1/test_app_connectors_service.py @@ -355,94 +355,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
-@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - ( - AppConnectorsServiceClient, - transports.AppConnectorsServiceGrpcTransport, - "grpc", - ), - ( - AppConnectorsServiceClient, - transports.AppConnectorsServiceRestTransport, - "rest", - ), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. 
- google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
- ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-beyondcorp-appgateways/CHANGELOG.md b/packages/google-cloud-beyondcorp-appgateways/CHANGELOG.md index 8383a6f906e9..f75afc01d50c 100644 --- a/packages/google-cloud-beyondcorp-appgateways/CHANGELOG.md +++ b/packages/google-cloud-beyondcorp-appgateways/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [0.4.13](https://github.com/googleapis/google-cloud-python/compare/google-cloud-beyondcorp-appgateways-v0.4.12...google-cloud-beyondcorp-appgateways-v0.4.13) (2024-10-31) + + +### Bug Fixes + +* disable universe-domain validation ([4ed4108](https://github.com/googleapis/google-cloud-python/commit/4ed41088ab3cbadfe4de7fa170f172666015ed24)) + ## [0.4.12](https://github.com/googleapis/google-cloud-python/compare/google-cloud-beyondcorp-appgateways-v0.4.11...google-cloud-beyondcorp-appgateways-v0.4.12) (2024-10-24) diff --git a/packages/google-cloud-beyondcorp-appgateways/google/cloud/beyondcorp_appgateways/gapic_version.py b/packages/google-cloud-beyondcorp-appgateways/google/cloud/beyondcorp_appgateways/gapic_version.py index db2ef16a95a4..9b19e5f10e00 100644 --- a/packages/google-cloud-beyondcorp-appgateways/google/cloud/beyondcorp_appgateways/gapic_version.py +++ b/packages/google-cloud-beyondcorp-appgateways/google/cloud/beyondcorp_appgateways/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.4.12" # {x-release-please-version} +__version__ = "0.4.13" # {x-release-please-version} diff --git a/packages/google-cloud-beyondcorp-appgateways/google/cloud/beyondcorp_appgateways_v1/gapic_version.py b/packages/google-cloud-beyondcorp-appgateways/google/cloud/beyondcorp_appgateways_v1/gapic_version.py index db2ef16a95a4..9b19e5f10e00 100644 --- a/packages/google-cloud-beyondcorp-appgateways/google/cloud/beyondcorp_appgateways_v1/gapic_version.py +++ b/packages/google-cloud-beyondcorp-appgateways/google/cloud/beyondcorp_appgateways_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.4.12" # {x-release-please-version} +__version__ = "0.4.13" # {x-release-please-version} diff --git a/packages/google-cloud-beyondcorp-appgateways/google/cloud/beyondcorp_appgateways_v1/services/app_gateways_service/client.py b/packages/google-cloud-beyondcorp-appgateways/google/cloud/beyondcorp_appgateways_v1/services/app_gateways_service/client.py index 4070c7174fdf..8b20ee88a3b8 100644 --- a/packages/google-cloud-beyondcorp-appgateways/google/cloud/beyondcorp_appgateways_v1/services/app_gateways_service/client.py +++ b/packages/google-cloud-beyondcorp-appgateways/google/cloud/beyondcorp_appgateways_v1/services/app_gateways_service/client.py @@ -486,36 +486,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. 
- - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = AppGatewaysServiceClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -525,13 +495,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or AppGatewaysServiceClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. 
+ return True @property def api_endpoint(self): diff --git a/packages/google-cloud-beyondcorp-appgateways/samples/generated_samples/snippet_metadata_google.cloud.beyondcorp.appgateways.v1.json b/packages/google-cloud-beyondcorp-appgateways/samples/generated_samples/snippet_metadata_google.cloud.beyondcorp.appgateways.v1.json index b9984c755b2a..ed4543c0a53d 100644 --- a/packages/google-cloud-beyondcorp-appgateways/samples/generated_samples/snippet_metadata_google.cloud.beyondcorp.appgateways.v1.json +++ b/packages/google-cloud-beyondcorp-appgateways/samples/generated_samples/snippet_metadata_google.cloud.beyondcorp.appgateways.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-beyondcorp-appgateways", - "version": "0.4.12" + "version": "0.4.13" }, "snippets": [ { diff --git a/packages/google-cloud-beyondcorp-appgateways/tests/unit/gapic/beyondcorp_appgateways_v1/test_app_gateways_service.py b/packages/google-cloud-beyondcorp-appgateways/tests/unit/gapic/beyondcorp_appgateways_v1/test_app_gateways_service.py index 5b3b7a0ea869..f115c48d43da 100644 --- a/packages/google-cloud-beyondcorp-appgateways/tests/unit/gapic/beyondcorp_appgateways_v1/test_app_gateways_service.py +++ b/packages/google-cloud-beyondcorp-appgateways/tests/unit/gapic/beyondcorp_appgateways_v1/test_app_gateways_service.py @@ -347,86 +347,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
-@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (AppGatewaysServiceClient, transports.AppGatewaysServiceGrpcTransport, "grpc"), - (AppGatewaysServiceClient, transports.AppGatewaysServiceRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. 
- google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
- ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-beyondcorp-clientconnectorservices/CHANGELOG.md b/packages/google-cloud-beyondcorp-clientconnectorservices/CHANGELOG.md index aa8f89d52ac1..415c0a74fa10 100644 --- a/packages/google-cloud-beyondcorp-clientconnectorservices/CHANGELOG.md +++ b/packages/google-cloud-beyondcorp-clientconnectorservices/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [0.4.13](https://github.com/googleapis/google-cloud-python/compare/google-cloud-beyondcorp-clientconnectorservices-v0.4.12...google-cloud-beyondcorp-clientconnectorservices-v0.4.13) (2024-10-31) + + +### Bug Fixes + +* disable universe-domain validation ([4ed4108](https://github.com/googleapis/google-cloud-python/commit/4ed41088ab3cbadfe4de7fa170f172666015ed24)) + ## [0.4.12](https://github.com/googleapis/google-cloud-python/compare/google-cloud-beyondcorp-clientconnectorservices-v0.4.11...google-cloud-beyondcorp-clientconnectorservices-v0.4.12) (2024-10-24) diff --git a/packages/google-cloud-beyondcorp-clientconnectorservices/google/cloud/beyondcorp_clientconnectorservices/gapic_version.py b/packages/google-cloud-beyondcorp-clientconnectorservices/google/cloud/beyondcorp_clientconnectorservices/gapic_version.py index db2ef16a95a4..9b19e5f10e00 100644 --- a/packages/google-cloud-beyondcorp-clientconnectorservices/google/cloud/beyondcorp_clientconnectorservices/gapic_version.py +++ b/packages/google-cloud-beyondcorp-clientconnectorservices/google/cloud/beyondcorp_clientconnectorservices/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.4.12" # {x-release-please-version} +__version__ = "0.4.13" # {x-release-please-version} diff --git a/packages/google-cloud-beyondcorp-clientconnectorservices/google/cloud/beyondcorp_clientconnectorservices_v1/gapic_version.py b/packages/google-cloud-beyondcorp-clientconnectorservices/google/cloud/beyondcorp_clientconnectorservices_v1/gapic_version.py index db2ef16a95a4..9b19e5f10e00 100644 --- a/packages/google-cloud-beyondcorp-clientconnectorservices/google/cloud/beyondcorp_clientconnectorservices_v1/gapic_version.py +++ b/packages/google-cloud-beyondcorp-clientconnectorservices/google/cloud/beyondcorp_clientconnectorservices_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.4.12" # {x-release-please-version} +__version__ = "0.4.13" # {x-release-please-version} diff --git a/packages/google-cloud-beyondcorp-clientconnectorservices/google/cloud/beyondcorp_clientconnectorservices_v1/services/client_connector_services_service/client.py b/packages/google-cloud-beyondcorp-clientconnectorservices/google/cloud/beyondcorp_clientconnectorservices_v1/services/client_connector_services_service/client.py index 1debb047061f..b84831bdbecf 100644 --- a/packages/google-cloud-beyondcorp-clientconnectorservices/google/cloud/beyondcorp_clientconnectorservices_v1/services/client_connector_services_service/client.py +++ b/packages/google-cloud-beyondcorp-clientconnectorservices/google/cloud/beyondcorp_clientconnectorservices_v1/services/client_connector_services_service/client.py @@ -497,36 +497,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. 
- - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = ClientConnectorServicesServiceClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -536,13 +506,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or ClientConnectorServicesServiceClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. 
+ return True @property def api_endpoint(self): diff --git a/packages/google-cloud-beyondcorp-clientconnectorservices/samples/generated_samples/snippet_metadata_google.cloud.beyondcorp.clientconnectorservices.v1.json b/packages/google-cloud-beyondcorp-clientconnectorservices/samples/generated_samples/snippet_metadata_google.cloud.beyondcorp.clientconnectorservices.v1.json index d68ab0ef1ce6..eaf0d4846226 100644 --- a/packages/google-cloud-beyondcorp-clientconnectorservices/samples/generated_samples/snippet_metadata_google.cloud.beyondcorp.clientconnectorservices.v1.json +++ b/packages/google-cloud-beyondcorp-clientconnectorservices/samples/generated_samples/snippet_metadata_google.cloud.beyondcorp.clientconnectorservices.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-beyondcorp-clientconnectorservices", - "version": "0.4.12" + "version": "0.4.13" }, "snippets": [ { diff --git a/packages/google-cloud-beyondcorp-clientconnectorservices/tests/unit/gapic/beyondcorp_clientconnectorservices_v1/test_client_connector_services_service.py b/packages/google-cloud-beyondcorp-clientconnectorservices/tests/unit/gapic/beyondcorp_clientconnectorservices_v1/test_client_connector_services_service.py index 49ebbb7654f9..dbef79a4e165 100644 --- a/packages/google-cloud-beyondcorp-clientconnectorservices/tests/unit/gapic/beyondcorp_clientconnectorservices_v1/test_client_connector_services_service.py +++ b/packages/google-cloud-beyondcorp-clientconnectorservices/tests/unit/gapic/beyondcorp_clientconnectorservices_v1/test_client_connector_services_service.py @@ -369,94 +369,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
-@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - ( - ClientConnectorServicesServiceClient, - transports.ClientConnectorServicesServiceGrpcTransport, - "grpc", - ), - ( - ClientConnectorServicesServiceClient, - transports.ClientConnectorServicesServiceRestTransport, - "rest", - ), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. 
- google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
- ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-beyondcorp-clientgateways/CHANGELOG.md b/packages/google-cloud-beyondcorp-clientgateways/CHANGELOG.md index c29dffc58180..7508f6c3d258 100644 --- a/packages/google-cloud-beyondcorp-clientgateways/CHANGELOG.md +++ b/packages/google-cloud-beyondcorp-clientgateways/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [0.4.12](https://github.com/googleapis/google-cloud-python/compare/google-cloud-beyondcorp-clientgateways-v0.4.11...google-cloud-beyondcorp-clientgateways-v0.4.12) (2024-10-31) + + +### Bug Fixes + +* disable universe-domain validation ([4ed4108](https://github.com/googleapis/google-cloud-python/commit/4ed41088ab3cbadfe4de7fa170f172666015ed24)) + ## [0.4.11](https://github.com/googleapis/google-cloud-python/compare/google-cloud-beyondcorp-clientgateways-v0.4.10...google-cloud-beyondcorp-clientgateways-v0.4.11) (2024-10-24) diff --git a/packages/google-cloud-beyondcorp-clientgateways/google/cloud/beyondcorp_clientgateways/gapic_version.py b/packages/google-cloud-beyondcorp-clientgateways/google/cloud/beyondcorp_clientgateways/gapic_version.py index 5feceb32bedf..db2ef16a95a4 100644 --- a/packages/google-cloud-beyondcorp-clientgateways/google/cloud/beyondcorp_clientgateways/gapic_version.py +++ b/packages/google-cloud-beyondcorp-clientgateways/google/cloud/beyondcorp_clientgateways/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.4.11" # {x-release-please-version} +__version__ = "0.4.12" # {x-release-please-version} diff --git a/packages/google-cloud-beyondcorp-clientgateways/google/cloud/beyondcorp_clientgateways_v1/gapic_version.py b/packages/google-cloud-beyondcorp-clientgateways/google/cloud/beyondcorp_clientgateways_v1/gapic_version.py index 5feceb32bedf..db2ef16a95a4 100644 --- a/packages/google-cloud-beyondcorp-clientgateways/google/cloud/beyondcorp_clientgateways_v1/gapic_version.py +++ b/packages/google-cloud-beyondcorp-clientgateways/google/cloud/beyondcorp_clientgateways_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.4.11" # {x-release-please-version} +__version__ = "0.4.12" # {x-release-please-version} diff --git a/packages/google-cloud-beyondcorp-clientgateways/google/cloud/beyondcorp_clientgateways_v1/services/client_gateways_service/client.py b/packages/google-cloud-beyondcorp-clientgateways/google/cloud/beyondcorp_clientgateways_v1/services/client_gateways_service/client.py index a0ecd8a65ae9..32db262535ce 100644 --- a/packages/google-cloud-beyondcorp-clientgateways/google/cloud/beyondcorp_clientgateways_v1/services/client_gateways_service/client.py +++ b/packages/google-cloud-beyondcorp-clientgateways/google/cloud/beyondcorp_clientgateways_v1/services/client_gateways_service/client.py @@ -485,36 +485,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. 
- - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = ClientGatewaysServiceClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -524,13 +494,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or ClientGatewaysServiceClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. 
+ return True @property def api_endpoint(self): diff --git a/packages/google-cloud-beyondcorp-clientgateways/samples/generated_samples/snippet_metadata_google.cloud.beyondcorp.clientgateways.v1.json b/packages/google-cloud-beyondcorp-clientgateways/samples/generated_samples/snippet_metadata_google.cloud.beyondcorp.clientgateways.v1.json index 48690602b31c..477a6153540f 100644 --- a/packages/google-cloud-beyondcorp-clientgateways/samples/generated_samples/snippet_metadata_google.cloud.beyondcorp.clientgateways.v1.json +++ b/packages/google-cloud-beyondcorp-clientgateways/samples/generated_samples/snippet_metadata_google.cloud.beyondcorp.clientgateways.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-beyondcorp-clientgateways", - "version": "0.4.11" + "version": "0.4.12" }, "snippets": [ { diff --git a/packages/google-cloud-beyondcorp-clientgateways/tests/unit/gapic/beyondcorp_clientgateways_v1/test_client_gateways_service.py b/packages/google-cloud-beyondcorp-clientgateways/tests/unit/gapic/beyondcorp_clientgateways_v1/test_client_gateways_service.py index 790708a76e0d..269a04e975e5 100644 --- a/packages/google-cloud-beyondcorp-clientgateways/tests/unit/gapic/beyondcorp_clientgateways_v1/test_client_gateways_service.py +++ b/packages/google-cloud-beyondcorp-clientgateways/tests/unit/gapic/beyondcorp_clientgateways_v1/test_client_gateways_service.py @@ -351,94 +351,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
-@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - ( - ClientGatewaysServiceClient, - transports.ClientGatewaysServiceGrpcTransport, - "grpc", - ), - ( - ClientGatewaysServiceClient, - transports.ClientGatewaysServiceRestTransport, - "rest", - ), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. 
- google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
- ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-bigquery-analyticshub/CHANGELOG.md b/packages/google-cloud-bigquery-analyticshub/CHANGELOG.md index 9046e5e4b3da..023f68511fc5 100644 --- a/packages/google-cloud-bigquery-analyticshub/CHANGELOG.md +++ b/packages/google-cloud-bigquery-analyticshub/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [0.4.13](https://github.com/googleapis/google-cloud-python/compare/google-cloud-bigquery-analyticshub-v0.4.12...google-cloud-bigquery-analyticshub-v0.4.13) (2024-10-31) + + +### Bug Fixes + +* disable universe-domain validation ([4ed4108](https://github.com/googleapis/google-cloud-python/commit/4ed41088ab3cbadfe4de7fa170f172666015ed24)) + ## [0.4.12](https://github.com/googleapis/google-cloud-python/compare/google-cloud-bigquery-analyticshub-v0.4.11...google-cloud-bigquery-analyticshub-v0.4.12) (2024-10-24) diff --git a/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub/gapic_version.py b/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub/gapic_version.py index db2ef16a95a4..9b19e5f10e00 100644 --- a/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub/gapic_version.py +++ b/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.4.12" # {x-release-please-version} +__version__ = "0.4.13" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub_v1/gapic_version.py b/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub_v1/gapic_version.py index db2ef16a95a4..9b19e5f10e00 100644 --- a/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub_v1/gapic_version.py +++ b/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.4.12" # {x-release-please-version} +__version__ = "0.4.13" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub_v1/services/analytics_hub_service/client.py b/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub_v1/services/analytics_hub_service/client.py index c9dabbcb36ef..3d2fd8b3bbe1 100644 --- a/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub_v1/services/analytics_hub_service/client.py +++ b/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub_v1/services/analytics_hub_service/client.py @@ -560,36 +560,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. 
- - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = AnalyticsHubServiceClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -599,13 +569,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or AnalyticsHubServiceClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. 
+ return True @property def api_endpoint(self): diff --git a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/snippet_metadata_google.cloud.bigquery.analyticshub.v1.json b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/snippet_metadata_google.cloud.bigquery.analyticshub.v1.json index ea627028f61e..37e294320dde 100644 --- a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/snippet_metadata_google.cloud.bigquery.analyticshub.v1.json +++ b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/snippet_metadata_google.cloud.bigquery.analyticshub.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-analyticshub", - "version": "0.4.12" + "version": "0.4.13" }, "snippets": [ { diff --git a/packages/google-cloud-bigquery-analyticshub/tests/unit/gapic/bigquery_analyticshub_v1/test_analytics_hub_service.py b/packages/google-cloud-bigquery-analyticshub/tests/unit/gapic/bigquery_analyticshub_v1/test_analytics_hub_service.py index 4675dc9d6199..98c4565af87b 100644 --- a/packages/google-cloud-bigquery-analyticshub/tests/unit/gapic/bigquery_analyticshub_v1/test_analytics_hub_service.py +++ b/packages/google-cloud-bigquery-analyticshub/tests/unit/gapic/bigquery_analyticshub_v1/test_analytics_hub_service.py @@ -346,89 +346,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - ( - AnalyticsHubServiceClient, - transports.AnalyticsHubServiceGrpcTransport, - "grpc", - ), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. 
- assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. 
- # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-bigquery-biglake/CHANGELOG.md b/packages/google-cloud-bigquery-biglake/CHANGELOG.md index 5da23095f8f3..a9c29575f39d 100644 --- a/packages/google-cloud-bigquery-biglake/CHANGELOG.md +++ b/packages/google-cloud-bigquery-biglake/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [0.4.11](https://github.com/googleapis/google-cloud-python/compare/google-cloud-bigquery-biglake-v0.4.10...google-cloud-bigquery-biglake-v0.4.11) (2024-10-31) + + +### Bug Fixes + +* disable universe-domain validation ([4ed4108](https://github.com/googleapis/google-cloud-python/commit/4ed41088ab3cbadfe4de7fa170f172666015ed24)) + ## [0.4.10](https://github.com/googleapis/google-cloud-python/compare/google-cloud-bigquery-biglake-v0.4.9...google-cloud-bigquery-biglake-v0.4.10) (2024-10-24) diff --git 
a/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake/gapic_version.py b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake/gapic_version.py index 792f80c59ee5..5feceb32bedf 100644 --- a/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake/gapic_version.py +++ b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.4.10" # {x-release-please-version} +__version__ = "0.4.11" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1/gapic_version.py b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1/gapic_version.py index 792f80c59ee5..5feceb32bedf 100644 --- a/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1/gapic_version.py +++ b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.4.10" # {x-release-please-version} +__version__ = "0.4.11" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1/services/metastore_service/client.py b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1/services/metastore_service/client.py index 37fc63db909f..fa45ceafe1e3 100644 --- a/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1/services/metastore_service/client.py +++ b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1/services/metastore_service/client.py @@ -525,36 +525,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = MetastoreServiceClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. 
@@ -564,13 +534,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or MetastoreServiceClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1alpha1/gapic_version.py b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1alpha1/gapic_version.py index 792f80c59ee5..5feceb32bedf 100644 --- a/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1alpha1/gapic_version.py +++ b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1alpha1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.4.10" # {x-release-please-version} +__version__ = "0.4.11" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1alpha1/services/metastore_service/client.py b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1alpha1/services/metastore_service/client.py index 0893a12db218..74e3fad5eb61 100644 --- a/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1alpha1/services/metastore_service/client.py +++ b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1alpha1/services/metastore_service/client.py @@ -551,36 +551,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = MetastoreServiceClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. 
@@ -590,13 +560,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or MetastoreServiceClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-bigquery-biglake/samples/generated_samples/snippet_metadata_google.cloud.bigquery.biglake.v1.json b/packages/google-cloud-bigquery-biglake/samples/generated_samples/snippet_metadata_google.cloud.bigquery.biglake.v1.json index 75f95af4287d..a32c80dfc1c9 100644 --- a/packages/google-cloud-bigquery-biglake/samples/generated_samples/snippet_metadata_google.cloud.bigquery.biglake.v1.json +++ b/packages/google-cloud-bigquery-biglake/samples/generated_samples/snippet_metadata_google.cloud.bigquery.biglake.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-biglake", - "version": "0.4.10" + "version": "0.4.11" }, "snippets": [ { diff --git a/packages/google-cloud-bigquery-biglake/samples/generated_samples/snippet_metadata_google.cloud.bigquery.biglake.v1alpha1.json b/packages/google-cloud-bigquery-biglake/samples/generated_samples/snippet_metadata_google.cloud.bigquery.biglake.v1alpha1.json index f256ed3c2b14..9125bc437013 100644 --- a/packages/google-cloud-bigquery-biglake/samples/generated_samples/snippet_metadata_google.cloud.bigquery.biglake.v1alpha1.json +++ b/packages/google-cloud-bigquery-biglake/samples/generated_samples/snippet_metadata_google.cloud.bigquery.biglake.v1alpha1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-biglake", - "version": "0.4.10" + "version": "0.4.11" }, "snippets": [ { diff --git a/packages/google-cloud-bigquery-biglake/tests/unit/gapic/bigquery_biglake_v1/test_metastore_service.py 
b/packages/google-cloud-bigquery-biglake/tests/unit/gapic/bigquery_biglake_v1/test_metastore_service.py index ea0bf42d68c1..75a521c0a1f1 100644 --- a/packages/google-cloud-bigquery-biglake/tests/unit/gapic/bigquery_biglake_v1/test_metastore_service.py +++ b/packages/google-cloud-bigquery-biglake/tests/unit/gapic/bigquery_biglake_v1/test_metastore_service.py @@ -321,86 +321,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (MetastoreServiceClient, transports.MetastoreServiceGrpcTransport, "grpc"), - (MetastoreServiceClient, transports.MetastoreServiceRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. 
- channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-bigquery-biglake/tests/unit/gapic/bigquery_biglake_v1alpha1/test_metastore_service.py b/packages/google-cloud-bigquery-biglake/tests/unit/gapic/bigquery_biglake_v1alpha1/test_metastore_service.py index d8b7a6b4cae9..68237c0ee58a 100644 --- a/packages/google-cloud-bigquery-biglake/tests/unit/gapic/bigquery_biglake_v1alpha1/test_metastore_service.py +++ b/packages/google-cloud-bigquery-biglake/tests/unit/gapic/bigquery_biglake_v1alpha1/test_metastore_service.py @@ -321,86 +321,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
-@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (MetastoreServiceClient, transports.MetastoreServiceGrpcTransport, "grpc"), - (MetastoreServiceClient, transports.MetastoreServiceRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. 
- google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
- ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-bigquery-connection/CHANGELOG.md b/packages/google-cloud-bigquery-connection/CHANGELOG.md index ef0061729ec2..11e3020d1ffa 100644 --- a/packages/google-cloud-bigquery-connection/CHANGELOG.md +++ b/packages/google-cloud-bigquery-connection/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [1.16.1](https://github.com/googleapis/google-cloud-python/compare/google-cloud-bigquery-connection-v1.16.0...google-cloud-bigquery-connection-v1.16.1) (2024-10-31) + + +### Bug Fixes + +* disable universe-domain validation ([4ed4108](https://github.com/googleapis/google-cloud-python/commit/4ed41088ab3cbadfe4de7fa170f172666015ed24)) + ## [1.16.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-bigquery-connection-v1.15.5...google-cloud-bigquery-connection-v1.16.0) (2024-10-24) diff --git a/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection/gapic_version.py b/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection/gapic_version.py index 3e0ea3b28f0a..b6e92d4eebd5 100644 --- a/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection/gapic_version.py +++ b/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.16.0" # {x-release-please-version} +__version__ = "1.16.1" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection_v1/gapic_version.py b/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection_v1/gapic_version.py index 3e0ea3b28f0a..b6e92d4eebd5 100644 --- a/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection_v1/gapic_version.py +++ b/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.16.0" # {x-release-please-version} +__version__ = "1.16.1" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection_v1/services/connection_service/client.py b/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection_v1/services/connection_service/client.py index 4dd767c16bad..f13986e90deb 100644 --- a/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection_v1/services/connection_service/client.py +++ b/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection_v1/services/connection_service/client.py @@ -512,36 +512,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. 
- - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = ConnectionServiceClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -551,13 +521,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or ConnectionServiceClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. 
+ return True @property def api_endpoint(self): diff --git a/packages/google-cloud-bigquery-connection/samples/generated_samples/snippet_metadata_google.cloud.bigquery.connection.v1.json b/packages/google-cloud-bigquery-connection/samples/generated_samples/snippet_metadata_google.cloud.bigquery.connection.v1.json index 4059428c848c..b942d3e38505 100644 --- a/packages/google-cloud-bigquery-connection/samples/generated_samples/snippet_metadata_google.cloud.bigquery.connection.v1.json +++ b/packages/google-cloud-bigquery-connection/samples/generated_samples/snippet_metadata_google.cloud.bigquery.connection.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-connection", - "version": "1.16.0" + "version": "1.16.1" }, "snippets": [ { diff --git a/packages/google-cloud-bigquery-connection/tests/unit/gapic/bigquery_connection_v1/test_connection_service.py b/packages/google-cloud-bigquery-connection/tests/unit/gapic/bigquery_connection_v1/test_connection_service.py index 5053176e8d49..b5db651971ac 100644 --- a/packages/google-cloud-bigquery-connection/tests/unit/gapic/bigquery_connection_v1/test_connection_service.py +++ b/packages/google-cloud-bigquery-connection/tests/unit/gapic/bigquery_connection_v1/test_connection_service.py @@ -333,86 +333,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (ConnectionServiceClient, transports.ConnectionServiceGrpcTransport, "grpc"), - (ConnectionServiceClient, transports.ConnectionServiceRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. 
- assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. 
- # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-bigquery-data-exchange/CHANGELOG.md b/packages/google-cloud-bigquery-data-exchange/CHANGELOG.md index ae9f995cb028..90ba2ac069a0 100644 --- a/packages/google-cloud-bigquery-data-exchange/CHANGELOG.md +++ b/packages/google-cloud-bigquery-data-exchange/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [0.5.15](https://github.com/googleapis/google-cloud-python/compare/google-cloud-bigquery-data-exchange-v0.5.14...google-cloud-bigquery-data-exchange-v0.5.15) (2024-10-31) + + +### Bug Fixes + +* disable universe-domain validation ([4ed4108](https://github.com/googleapis/google-cloud-python/commit/4ed41088ab3cbadfe4de7fa170f172666015ed24)) + ## [0.5.14](https://github.com/googleapis/google-cloud-python/compare/google-cloud-bigquery-data-exchange-v0.5.13...google-cloud-bigquery-data-exchange-v0.5.14) (2024-10-24) diff --git 
a/packages/google-cloud-bigquery-data-exchange/google/cloud/bigquery_data_exchange/gapic_version.py b/packages/google-cloud-bigquery-data-exchange/google/cloud/bigquery_data_exchange/gapic_version.py index 0f3dcb10f73a..35c9af734238 100644 --- a/packages/google-cloud-bigquery-data-exchange/google/cloud/bigquery_data_exchange/gapic_version.py +++ b/packages/google-cloud-bigquery-data-exchange/google/cloud/bigquery_data_exchange/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.5.14" # {x-release-please-version} +__version__ = "0.5.15" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-data-exchange/google/cloud/bigquery_data_exchange_v1beta1/gapic_version.py b/packages/google-cloud-bigquery-data-exchange/google/cloud/bigquery_data_exchange_v1beta1/gapic_version.py index 0f3dcb10f73a..35c9af734238 100644 --- a/packages/google-cloud-bigquery-data-exchange/google/cloud/bigquery_data_exchange_v1beta1/gapic_version.py +++ b/packages/google-cloud-bigquery-data-exchange/google/cloud/bigquery_data_exchange_v1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.5.14" # {x-release-please-version} +__version__ = "0.5.15" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-data-exchange/google/cloud/bigquery_data_exchange_v1beta1/services/analytics_hub_service/client.py b/packages/google-cloud-bigquery-data-exchange/google/cloud/bigquery_data_exchange_v1beta1/services/analytics_hub_service/client.py index 83295d49161c..a51abb3756e4 100644 --- a/packages/google-cloud-bigquery-data-exchange/google/cloud/bigquery_data_exchange_v1beta1/services/analytics_hub_service/client.py +++ b/packages/google-cloud-bigquery-data-exchange/google/cloud/bigquery_data_exchange_v1beta1/services/analytics_hub_service/client.py @@ -514,36 +514,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = AnalyticsHubServiceClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. 
@@ -553,13 +523,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or AnalyticsHubServiceClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-bigquery-data-exchange/samples/generated_samples/snippet_metadata_google.cloud.bigquery.dataexchange.v1beta1.json b/packages/google-cloud-bigquery-data-exchange/samples/generated_samples/snippet_metadata_google.cloud.bigquery.dataexchange.v1beta1.json index b60d6300e315..622338e9867c 100644 --- a/packages/google-cloud-bigquery-data-exchange/samples/generated_samples/snippet_metadata_google.cloud.bigquery.dataexchange.v1beta1.json +++ b/packages/google-cloud-bigquery-data-exchange/samples/generated_samples/snippet_metadata_google.cloud.bigquery.dataexchange.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-data-exchange", - "version": "0.5.14" + "version": "0.5.15" }, "snippets": [ { diff --git a/packages/google-cloud-bigquery-data-exchange/tests/unit/gapic/bigquery_data_exchange_v1beta1/test_analytics_hub_service.py b/packages/google-cloud-bigquery-data-exchange/tests/unit/gapic/bigquery_data_exchange_v1beta1/test_analytics_hub_service.py index 175370abe5c4..2840816a64cc 100644 --- a/packages/google-cloud-bigquery-data-exchange/tests/unit/gapic/bigquery_data_exchange_v1beta1/test_analytics_hub_service.py +++ b/packages/google-cloud-bigquery-data-exchange/tests/unit/gapic/bigquery_data_exchange_v1beta1/test_analytics_hub_service.py @@ -335,89 +335,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
-@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - ( - AnalyticsHubServiceClient, - transports.AnalyticsHubServiceGrpcTransport, - "grpc", - ), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. 
- client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
- ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-bigquery-datapolicies/CHANGELOG.md b/packages/google-cloud-bigquery-datapolicies/CHANGELOG.md index 3835827d04f6..e6a496e2ec18 100644 --- a/packages/google-cloud-bigquery-datapolicies/CHANGELOG.md +++ b/packages/google-cloud-bigquery-datapolicies/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [0.6.10](https://github.com/googleapis/google-cloud-python/compare/google-cloud-bigquery-datapolicies-v0.6.9...google-cloud-bigquery-datapolicies-v0.6.10) (2024-10-31) + + +### Bug Fixes + +* disable universe-domain validation ([4ed4108](https://github.com/googleapis/google-cloud-python/commit/4ed41088ab3cbadfe4de7fa170f172666015ed24)) + ## [0.6.9](https://github.com/googleapis/google-cloud-python/compare/google-cloud-bigquery-datapolicies-v0.6.8...google-cloud-bigquery-datapolicies-v0.6.9) (2024-10-24) diff --git a/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies/gapic_version.py b/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies/gapic_version.py index 1699c98da708..8ebdaa033b52 100644 --- a/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies/gapic_version.py +++ b/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.6.9" # {x-release-please-version} +__version__ = "0.6.10" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1/gapic_version.py b/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1/gapic_version.py index 1699c98da708..8ebdaa033b52 100644 --- a/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1/gapic_version.py +++ b/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.6.9" # {x-release-please-version} +__version__ = "0.6.10" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1/services/data_policy_service/client.py b/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1/services/data_policy_service/client.py index 50a82cf1652f..33d22df0b8b4 100644 --- a/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1/services/data_policy_service/client.py +++ b/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1/services/data_policy_service/client.py @@ -469,36 +469,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. 
- - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = DataPolicyServiceClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -508,13 +478,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or DataPolicyServiceClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1beta1/gapic_version.py b/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1beta1/gapic_version.py index 1699c98da708..8ebdaa033b52 100644 --- a/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1beta1/gapic_version.py +++ b/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.6.9" # {x-release-please-version} +__version__ = "0.6.10" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1beta1/services/data_policy_service/client.py b/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1beta1/services/data_policy_service/client.py index c7e5e6aa27a8..a71fefd1fcc2 100644 --- a/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1beta1/services/data_policy_service/client.py +++ b/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1beta1/services/data_policy_service/client.py @@ -469,36 +469,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = DataPolicyServiceClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. 
@@ -508,13 +478,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or DataPolicyServiceClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-bigquery-datapolicies/samples/generated_samples/snippet_metadata_google.cloud.bigquery.datapolicies.v1.json b/packages/google-cloud-bigquery-datapolicies/samples/generated_samples/snippet_metadata_google.cloud.bigquery.datapolicies.v1.json index 7a4fc23b1f0f..5dba0dd52f72 100644 --- a/packages/google-cloud-bigquery-datapolicies/samples/generated_samples/snippet_metadata_google.cloud.bigquery.datapolicies.v1.json +++ b/packages/google-cloud-bigquery-datapolicies/samples/generated_samples/snippet_metadata_google.cloud.bigquery.datapolicies.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-datapolicies", - "version": "0.6.9" + "version": "0.6.10" }, "snippets": [ { diff --git a/packages/google-cloud-bigquery-datapolicies/samples/generated_samples/snippet_metadata_google.cloud.bigquery.datapolicies.v1beta1.json b/packages/google-cloud-bigquery-datapolicies/samples/generated_samples/snippet_metadata_google.cloud.bigquery.datapolicies.v1beta1.json index 83ef5aa3e557..f1f0fc45f50f 100644 --- a/packages/google-cloud-bigquery-datapolicies/samples/generated_samples/snippet_metadata_google.cloud.bigquery.datapolicies.v1beta1.json +++ b/packages/google-cloud-bigquery-datapolicies/samples/generated_samples/snippet_metadata_google.cloud.bigquery.datapolicies.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-datapolicies", - "version": "0.6.9" + "version": "0.6.10" }, "snippets": [ { diff --git 
a/packages/google-cloud-bigquery-datapolicies/tests/unit/gapic/bigquery_datapolicies_v1/test_data_policy_service.py b/packages/google-cloud-bigquery-datapolicies/tests/unit/gapic/bigquery_datapolicies_v1/test_data_policy_service.py index 230d94d95a7b..289587bb321e 100644 --- a/packages/google-cloud-bigquery-datapolicies/tests/unit/gapic/bigquery_datapolicies_v1/test_data_policy_service.py +++ b/packages/google-cloud-bigquery-datapolicies/tests/unit/gapic/bigquery_datapolicies_v1/test_data_policy_service.py @@ -332,86 +332,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (DataPolicyServiceClient, transports.DataPolicyServiceGrpcTransport, "grpc"), - (DataPolicyServiceClient, transports.DataPolicyServiceRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. 
- channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-bigquery-datapolicies/tests/unit/gapic/bigquery_datapolicies_v1beta1/test_data_policy_service.py b/packages/google-cloud-bigquery-datapolicies/tests/unit/gapic/bigquery_datapolicies_v1beta1/test_data_policy_service.py index 284607a51c22..a9ac78315857 100644 --- a/packages/google-cloud-bigquery-datapolicies/tests/unit/gapic/bigquery_datapolicies_v1beta1/test_data_policy_service.py +++ b/packages/google-cloud-bigquery-datapolicies/tests/unit/gapic/bigquery_datapolicies_v1beta1/test_data_policy_service.py @@ -327,85 +327,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
-@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (DataPolicyServiceClient, transports.DataPolicyServiceGrpcTransport, "grpc"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. 
- client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
- ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-bigquery-datatransfer/CHANGELOG.md b/packages/google-cloud-bigquery-datatransfer/CHANGELOG.md index dc98246fb117..7582709490c5 100644 --- a/packages/google-cloud-bigquery-datatransfer/CHANGELOG.md +++ b/packages/google-cloud-bigquery-datatransfer/CHANGELOG.md @@ -4,6 +4,13 @@ [1]: https://pypi.org/project/google-cloud-bigquery-datatransfer/#history +## [3.17.1](https://github.com/googleapis/google-cloud-python/compare/google-cloud-bigquery-datatransfer-v3.17.0...google-cloud-bigquery-datatransfer-v3.17.1) (2024-10-31) + + +### Bug Fixes + +* disable universe-domain validation ([4ed4108](https://github.com/googleapis/google-cloud-python/commit/4ed41088ab3cbadfe4de7fa170f172666015ed24)) + ## [3.17.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-bigquery-datatransfer-v3.16.0...google-cloud-bigquery-datatransfer-v3.17.0) (2024-10-24) diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer/gapic_version.py b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer/gapic_version.py index d2b82f793e78..6942500960db 100644 --- a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer/gapic_version.py +++ b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "3.17.0" # {x-release-please-version} +__version__ = "3.17.1" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/gapic_version.py b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/gapic_version.py index d2b82f793e78..6942500960db 100644 --- a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/gapic_version.py +++ b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.17.0" # {x-release-please-version} +__version__ = "3.17.1" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/client.py b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/client.py index 15df58ebffa6..3f6bf741deed 100644 --- a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/client.py +++ b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/client.py @@ -509,36 +509,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. 
- - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = DataTransferServiceClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -548,13 +518,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or DataTransferServiceClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. 
+ return True @property def api_endpoint(self): diff --git a/packages/google-cloud-bigquery-datatransfer/samples/generated_samples/snippet_metadata_google.cloud.bigquery.datatransfer.v1.json b/packages/google-cloud-bigquery-datatransfer/samples/generated_samples/snippet_metadata_google.cloud.bigquery.datatransfer.v1.json index d731f68dc184..fb91bc666764 100644 --- a/packages/google-cloud-bigquery-datatransfer/samples/generated_samples/snippet_metadata_google.cloud.bigquery.datatransfer.v1.json +++ b/packages/google-cloud-bigquery-datatransfer/samples/generated_samples/snippet_metadata_google.cloud.bigquery.datatransfer.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-datatransfer", - "version": "3.17.0" + "version": "3.17.1" }, "snippets": [ { diff --git a/packages/google-cloud-bigquery-datatransfer/tests/unit/gapic/bigquery_datatransfer_v1/test_data_transfer_service.py b/packages/google-cloud-bigquery-datatransfer/tests/unit/gapic/bigquery_datatransfer_v1/test_data_transfer_service.py index 705721cba2ba..48c9b73e15b0 100644 --- a/packages/google-cloud-bigquery-datatransfer/tests/unit/gapic/bigquery_datatransfer_v1/test_data_transfer_service.py +++ b/packages/google-cloud-bigquery-datatransfer/tests/unit/gapic/bigquery_datatransfer_v1/test_data_transfer_service.py @@ -341,94 +341,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
-@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - ( - DataTransferServiceClient, - transports.DataTransferServiceGrpcTransport, - "grpc", - ), - ( - DataTransferServiceClient, - transports.DataTransferServiceRestTransport, - "rest", - ), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. 
- google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
- ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-bigquery-migration/CHANGELOG.md b/packages/google-cloud-bigquery-migration/CHANGELOG.md index 883dd9e7712a..f860496f6900 100644 --- a/packages/google-cloud-bigquery-migration/CHANGELOG.md +++ b/packages/google-cloud-bigquery-migration/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [0.11.11](https://github.com/googleapis/google-cloud-python/compare/google-cloud-bigquery-migration-v0.11.10...google-cloud-bigquery-migration-v0.11.11) (2024-10-31) + + +### Bug Fixes + +* disable universe-domain validation ([4ed4108](https://github.com/googleapis/google-cloud-python/commit/4ed41088ab3cbadfe4de7fa170f172666015ed24)) + ## [0.11.10](https://github.com/googleapis/google-cloud-python/compare/google-cloud-bigquery-migration-v0.11.9...google-cloud-bigquery-migration-v0.11.10) (2024-10-24) diff --git a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration/gapic_version.py b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration/gapic_version.py index 8914d3106fe9..11e34cec2824 100644 --- a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration/gapic_version.py +++ b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.11.10" # {x-release-please-version} +__version__ = "0.11.11" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2/gapic_version.py b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2/gapic_version.py index 8914d3106fe9..11e34cec2824 100644 --- a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2/gapic_version.py +++ b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.11.10" # {x-release-please-version} +__version__ = "0.11.11" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2/services/migration_service/client.py b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2/services/migration_service/client.py index 87b6df7fd5f2..0db4106dafbf 100644 --- a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2/services/migration_service/client.py +++ b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2/services/migration_service/client.py @@ -491,36 +491,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. 
- """ - - default_universe = MigrationServiceClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -530,13 +500,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or MigrationServiceClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/gapic_version.py b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/gapic_version.py index 8914d3106fe9..11e34cec2824 100644 --- a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/gapic_version.py +++ b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.11.10" # {x-release-please-version} +__version__ = "0.11.11" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/services/migration_service/client.py b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/services/migration_service/client.py index 32164e645021..5a2c7a340e99 100644 --- a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/services/migration_service/client.py +++ b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/services/migration_service/client.py @@ -491,36 +491,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = MigrationServiceClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. 
@@ -530,13 +500,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or MigrationServiceClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/services/sql_translation_service/client.py b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/services/sql_translation_service/client.py index ff48b7f2af4b..54c1064a5a02 100644 --- a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/services/sql_translation_service/client.py +++ b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/services/sql_translation_service/client.py @@ -440,36 +440,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. 
- """ - - default_universe = SqlTranslationServiceClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -479,13 +449,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or SqlTranslationServiceClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. 
+ return True @property def api_endpoint(self): diff --git a/packages/google-cloud-bigquery-migration/samples/generated_samples/snippet_metadata_google.cloud.bigquery.migration.v2.json b/packages/google-cloud-bigquery-migration/samples/generated_samples/snippet_metadata_google.cloud.bigquery.migration.v2.json index bfe515847079..53768f71bcd4 100644 --- a/packages/google-cloud-bigquery-migration/samples/generated_samples/snippet_metadata_google.cloud.bigquery.migration.v2.json +++ b/packages/google-cloud-bigquery-migration/samples/generated_samples/snippet_metadata_google.cloud.bigquery.migration.v2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-migration", - "version": "0.11.10" + "version": "0.11.11" }, "snippets": [ { diff --git a/packages/google-cloud-bigquery-migration/samples/generated_samples/snippet_metadata_google.cloud.bigquery.migration.v2alpha.json b/packages/google-cloud-bigquery-migration/samples/generated_samples/snippet_metadata_google.cloud.bigquery.migration.v2alpha.json index c9c8e2f64bce..e6977344f9bb 100644 --- a/packages/google-cloud-bigquery-migration/samples/generated_samples/snippet_metadata_google.cloud.bigquery.migration.v2alpha.json +++ b/packages/google-cloud-bigquery-migration/samples/generated_samples/snippet_metadata_google.cloud.bigquery.migration.v2alpha.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-migration", - "version": "0.11.10" + "version": "0.11.11" }, "snippets": [ { diff --git a/packages/google-cloud-bigquery-migration/tests/unit/gapic/bigquery_migration_v2/test_migration_service.py b/packages/google-cloud-bigquery-migration/tests/unit/gapic/bigquery_migration_v2/test_migration_service.py index 70ab3e7828b3..52e18ce616db 100644 --- a/packages/google-cloud-bigquery-migration/tests/unit/gapic/bigquery_migration_v2/test_migration_service.py +++ b/packages/google-cloud-bigquery-migration/tests/unit/gapic/bigquery_migration_v2/test_migration_service.py @@ -328,85 
+328,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (MigrationServiceClient, transports.MigrationServiceGrpcTransport, "grpc"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. 
- google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
- ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-bigquery-migration/tests/unit/gapic/bigquery_migration_v2alpha/test_migration_service.py b/packages/google-cloud-bigquery-migration/tests/unit/gapic/bigquery_migration_v2alpha/test_migration_service.py index 1d2d709e5364..ec6917df7b1c 100644 --- a/packages/google-cloud-bigquery-migration/tests/unit/gapic/bigquery_migration_v2alpha/test_migration_service.py +++ b/packages/google-cloud-bigquery-migration/tests/unit/gapic/bigquery_migration_v2alpha/test_migration_service.py @@ -325,85 +325,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (MigrationServiceClient, transports.MigrationServiceGrpcTransport, "grpc"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. 
Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-bigquery-migration/tests/unit/gapic/bigquery_migration_v2alpha/test_sql_translation_service.py b/packages/google-cloud-bigquery-migration/tests/unit/gapic/bigquery_migration_v2alpha/test_sql_translation_service.py index 5301798ce0ce..b4d046a3b632 100644 --- a/packages/google-cloud-bigquery-migration/tests/unit/gapic/bigquery_migration_v2alpha/test_sql_translation_service.py +++ b/packages/google-cloud-bigquery-migration/tests/unit/gapic/bigquery_migration_v2alpha/test_sql_translation_service.py @@ -329,89 +329,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
-@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - ( - SqlTranslationServiceClient, - transports.SqlTranslationServiceGrpcTransport, - "grpc", - ), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. 
- client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
- ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-bigquery-reservation/CHANGELOG.md b/packages/google-cloud-bigquery-reservation/CHANGELOG.md index b3fa55e51add..463f17384501 100644 --- a/packages/google-cloud-bigquery-reservation/CHANGELOG.md +++ b/packages/google-cloud-bigquery-reservation/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [1.14.1](https://github.com/googleapis/google-cloud-python/compare/google-cloud-bigquery-reservation-v1.14.0...google-cloud-bigquery-reservation-v1.14.1) (2024-10-31) + + +### Bug Fixes + +* disable universe-domain validation ([4ed4108](https://github.com/googleapis/google-cloud-python/commit/4ed41088ab3cbadfe4de7fa170f172666015ed24)) + ## [1.14.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-bigquery-reservation-v1.13.5...google-cloud-bigquery-reservation-v1.14.0) (2024-10-24) diff --git a/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation/gapic_version.py b/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation/gapic_version.py index 2159c8af6f8e..231f5cf041ff 100644 --- a/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation/gapic_version.py +++ b/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.14.0" # {x-release-please-version} +__version__ = "1.14.1" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/gapic_version.py b/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/gapic_version.py index 2159c8af6f8e..231f5cf041ff 100644 --- a/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/gapic_version.py +++ b/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.14.0" # {x-release-please-version} +__version__ = "1.14.1" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/services/reservation_service/client.py b/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/services/reservation_service/client.py index 30e6b9de73dd..d23e2c9f8879 100644 --- a/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/services/reservation_service/client.py +++ b/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/services/reservation_service/client.py @@ -550,36 +550,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. 
- - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = ReservationServiceClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -589,13 +559,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or ReservationServiceClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. 
+ return True @property def api_endpoint(self): diff --git a/packages/google-cloud-bigquery-reservation/samples/generated_samples/snippet_metadata_google.cloud.bigquery.reservation.v1.json b/packages/google-cloud-bigquery-reservation/samples/generated_samples/snippet_metadata_google.cloud.bigquery.reservation.v1.json index c78b07a08ba9..c3418e392fa3 100644 --- a/packages/google-cloud-bigquery-reservation/samples/generated_samples/snippet_metadata_google.cloud.bigquery.reservation.v1.json +++ b/packages/google-cloud-bigquery-reservation/samples/generated_samples/snippet_metadata_google.cloud.bigquery.reservation.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-reservation", - "version": "1.14.0" + "version": "1.14.1" }, "snippets": [ { diff --git a/packages/google-cloud-bigquery-reservation/tests/unit/gapic/bigquery_reservation_v1/test_reservation_service.py b/packages/google-cloud-bigquery-reservation/tests/unit/gapic/bigquery_reservation_v1/test_reservation_service.py index 136210005cb9..63c6e54d04e3 100644 --- a/packages/google-cloud-bigquery-reservation/tests/unit/gapic/bigquery_reservation_v1/test_reservation_service.py +++ b/packages/google-cloud-bigquery-reservation/tests/unit/gapic/bigquery_reservation_v1/test_reservation_service.py @@ -336,86 +336,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (ReservationServiceClient, transports.ReservationServiceGrpcTransport, "grpc"), - (ReservationServiceClient, transports.ReservationServiceRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. 
- assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. 
- # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-billing-budgets/CHANGELOG.md b/packages/google-cloud-billing-budgets/CHANGELOG.md index 6e6f8814c459..871b40c862b9 100644 --- a/packages/google-cloud-billing-budgets/CHANGELOG.md +++ b/packages/google-cloud-billing-budgets/CHANGELOG.md @@ -1,4 +1,11 @@ # Changelog +## [1.15.1](https://github.com/googleapis/google-cloud-python/compare/google-cloud-billing-budgets-v1.15.0...google-cloud-billing-budgets-v1.15.1) (2024-10-31) + + +### Bug Fixes + +* disable universe-domain validation ([4ed4108](https://github.com/googleapis/google-cloud-python/commit/4ed41088ab3cbadfe4de7fa170f172666015ed24)) + ## [1.15.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-billing-budgets-v1.14.5...google-cloud-billing-budgets-v1.15.0) (2024-10-24) diff --git 
a/packages/google-cloud-billing-budgets/google/cloud/billing/budgets/gapic_version.py b/packages/google-cloud-billing-budgets/google/cloud/billing/budgets/gapic_version.py index cf18a472a8a2..2fd2bb1630b4 100644 --- a/packages/google-cloud-billing-budgets/google/cloud/billing/budgets/gapic_version.py +++ b/packages/google-cloud-billing-budgets/google/cloud/billing/budgets/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.15.0" # {x-release-please-version} +__version__ = "1.15.1" # {x-release-please-version} diff --git a/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1/gapic_version.py b/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1/gapic_version.py index cf18a472a8a2..2fd2bb1630b4 100644 --- a/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1/gapic_version.py +++ b/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.15.0" # {x-release-please-version} +__version__ = "1.15.1" # {x-release-please-version} diff --git a/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1/services/budget_service/client.py b/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1/services/budget_service/client.py index 2095fbefdf66..a87f5383108a 100644 --- a/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1/services/budget_service/client.py +++ b/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1/services/budget_service/client.py @@ -461,36 +461,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = BudgetServiceClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. 
@@ -500,13 +470,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or BudgetServiceClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1beta1/gapic_version.py b/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1beta1/gapic_version.py index cf18a472a8a2..2fd2bb1630b4 100644 --- a/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1beta1/gapic_version.py +++ b/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.15.0" # {x-release-please-version} +__version__ = "1.15.1" # {x-release-please-version} diff --git a/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1beta1/services/budget_service/client.py b/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1beta1/services/budget_service/client.py index 8407b63a3c79..40d7aa259418 100644 --- a/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1beta1/services/budget_service/client.py +++ b/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1beta1/services/budget_service/client.py @@ -457,36 +457,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. 
- - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = BudgetServiceClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -496,13 +466,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or BudgetServiceClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. 
+ return True @property def api_endpoint(self): diff --git a/packages/google-cloud-billing-budgets/samples/generated_samples/snippet_metadata_google.cloud.billing.budgets.v1.json b/packages/google-cloud-billing-budgets/samples/generated_samples/snippet_metadata_google.cloud.billing.budgets.v1.json index cdc6ceff3922..c53756bde983 100644 --- a/packages/google-cloud-billing-budgets/samples/generated_samples/snippet_metadata_google.cloud.billing.budgets.v1.json +++ b/packages/google-cloud-billing-budgets/samples/generated_samples/snippet_metadata_google.cloud.billing.budgets.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-billing-budgets", - "version": "1.15.0" + "version": "1.15.1" }, "snippets": [ { diff --git a/packages/google-cloud-billing-budgets/samples/generated_samples/snippet_metadata_google.cloud.billing.budgets.v1beta1.json b/packages/google-cloud-billing-budgets/samples/generated_samples/snippet_metadata_google.cloud.billing.budgets.v1beta1.json index 1437a9f2095d..a3d3721a6a97 100644 --- a/packages/google-cloud-billing-budgets/samples/generated_samples/snippet_metadata_google.cloud.billing.budgets.v1beta1.json +++ b/packages/google-cloud-billing-budgets/samples/generated_samples/snippet_metadata_google.cloud.billing.budgets.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-billing-budgets", - "version": "1.15.0" + "version": "1.15.1" }, "snippets": [ { diff --git a/packages/google-cloud-billing-budgets/tests/unit/gapic/budgets_v1/test_budget_service.py b/packages/google-cloud-billing-budgets/tests/unit/gapic/budgets_v1/test_budget_service.py index a1a7f146bd54..0e2ce042a50f 100644 --- a/packages/google-cloud-billing-budgets/tests/unit/gapic/budgets_v1/test_budget_service.py +++ b/packages/google-cloud-billing-budgets/tests/unit/gapic/budgets_v1/test_budget_service.py @@ -318,86 +318,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
-@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (BudgetServiceClient, transports.BudgetServiceGrpcTransport, "grpc"), - (BudgetServiceClient, transports.BudgetServiceRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. 
- google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
- ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-billing-budgets/tests/unit/gapic/budgets_v1beta1/test_budget_service.py b/packages/google-cloud-billing-budgets/tests/unit/gapic/budgets_v1beta1/test_budget_service.py index 95b5422338cf..5b19669c64b4 100644 --- a/packages/google-cloud-billing-budgets/tests/unit/gapic/budgets_v1beta1/test_budget_service.py +++ b/packages/google-cloud-billing-budgets/tests/unit/gapic/budgets_v1beta1/test_budget_service.py @@ -313,85 +313,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (BudgetServiceClient, transports.BudgetServiceGrpcTransport, "grpc"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. 
- channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-billing/CHANGELOG.md b/packages/google-cloud-billing/CHANGELOG.md index c15dcc6b6eec..ba23ce3cdef0 100644 --- a/packages/google-cloud-billing/CHANGELOG.md +++ b/packages/google-cloud-billing/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [1.14.1](https://github.com/googleapis/google-cloud-python/compare/google-cloud-billing-v1.14.0...google-cloud-billing-v1.14.1) (2024-10-31) + + +### Bug Fixes + +* disable universe-domain validation ([4ed4108](https://github.com/googleapis/google-cloud-python/commit/4ed41088ab3cbadfe4de7fa170f172666015ed24)) + ## [1.14.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-billing-v1.13.6...google-cloud-billing-v1.14.0) (2024-10-24) diff --git a/packages/google-cloud-billing/google/cloud/billing/gapic_version.py b/packages/google-cloud-billing/google/cloud/billing/gapic_version.py index 2159c8af6f8e..231f5cf041ff 100644 --- a/packages/google-cloud-billing/google/cloud/billing/gapic_version.py 
+++ b/packages/google-cloud-billing/google/cloud/billing/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.14.0" # {x-release-please-version} +__version__ = "1.14.1" # {x-release-please-version} diff --git a/packages/google-cloud-billing/google/cloud/billing_v1/gapic_version.py b/packages/google-cloud-billing/google/cloud/billing_v1/gapic_version.py index 2159c8af6f8e..231f5cf041ff 100644 --- a/packages/google-cloud-billing/google/cloud/billing_v1/gapic_version.py +++ b/packages/google-cloud-billing/google/cloud/billing_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.14.0" # {x-release-please-version} +__version__ = "1.14.1" # {x-release-please-version} diff --git a/packages/google-cloud-billing/google/cloud/billing_v1/services/cloud_billing/client.py b/packages/google-cloud-billing/google/cloud/billing_v1/services/cloud_billing/client.py index c31f665736cb..cc1f995bfa12 100644 --- a/packages/google-cloud-billing/google/cloud/billing_v1/services/cloud_billing/client.py +++ b/packages/google-cloud-billing/google/cloud/billing_v1/services/cloud_billing/client.py @@ -502,36 +502,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. 
- """ - - default_universe = CloudBillingClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -541,13 +511,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or CloudBillingClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-billing/google/cloud/billing_v1/services/cloud_catalog/client.py b/packages/google-cloud-billing/google/cloud/billing_v1/services/cloud_catalog/client.py index 864deed1c10e..60d8764dea8f 100644 --- a/packages/google-cloud-billing/google/cloud/billing_v1/services/cloud_catalog/client.py +++ b/packages/google-cloud-billing/google/cloud/billing_v1/services/cloud_catalog/client.py @@ -472,36 +472,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. 
- credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = CloudCatalogClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -511,13 +481,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or CloudCatalogClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. 
+ return True @property def api_endpoint(self): diff --git a/packages/google-cloud-billing/samples/generated_samples/snippet_metadata_google.cloud.billing.v1.json b/packages/google-cloud-billing/samples/generated_samples/snippet_metadata_google.cloud.billing.v1.json index df7ec1193f00..fda2838313c5 100644 --- a/packages/google-cloud-billing/samples/generated_samples/snippet_metadata_google.cloud.billing.v1.json +++ b/packages/google-cloud-billing/samples/generated_samples/snippet_metadata_google.cloud.billing.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-billing", - "version": "1.14.0" + "version": "1.14.1" }, "snippets": [ { diff --git a/packages/google-cloud-billing/tests/unit/gapic/billing_v1/test_cloud_billing.py b/packages/google-cloud-billing/tests/unit/gapic/billing_v1/test_cloud_billing.py index 4d7e3bf7ae77..e46f30dba6d2 100644 --- a/packages/google-cloud-billing/tests/unit/gapic/billing_v1/test_cloud_billing.py +++ b/packages/google-cloud-billing/tests/unit/gapic/billing_v1/test_cloud_billing.py @@ -308,86 +308,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (CloudBillingClient, transports.CloudBillingGrpcTransport, "grpc"), - (CloudBillingClient, transports.CloudBillingRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. 
- channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-billing/tests/unit/gapic/billing_v1/test_cloud_catalog.py b/packages/google-cloud-billing/tests/unit/gapic/billing_v1/test_cloud_catalog.py index a4e54ccc61c2..e653309e24b8 100644 --- a/packages/google-cloud-billing/tests/unit/gapic/billing_v1/test_cloud_catalog.py +++ b/packages/google-cloud-billing/tests/unit/gapic/billing_v1/test_cloud_catalog.py @@ -304,86 +304,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (CloudCatalogClient, transports.CloudCatalogGrpcTransport, "grpc"), - (CloudCatalogClient, transports.CloudCatalogRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. 
- assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. 
- # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-binary-authorization/CHANGELOG.md b/packages/google-cloud-binary-authorization/CHANGELOG.md index 90e90fde7d93..ddaecb8ce1ed 100644 --- a/packages/google-cloud-binary-authorization/CHANGELOG.md +++ b/packages/google-cloud-binary-authorization/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [1.11.1](https://github.com/googleapis/google-cloud-python/compare/google-cloud-binary-authorization-v1.11.0...google-cloud-binary-authorization-v1.11.1) (2024-10-31) + + +### Bug Fixes + +* disable universe-domain validation ([4ed4108](https://github.com/googleapis/google-cloud-python/commit/4ed41088ab3cbadfe4de7fa170f172666015ed24)) + ## [1.11.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-binary-authorization-v1.10.5...google-cloud-binary-authorization-v1.11.0) (2024-10-24) diff --git 
a/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization/gapic_version.py b/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization/gapic_version.py index 50d842f376d0..b50cada0b7ee 100644 --- a/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization/gapic_version.py +++ b/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.11.0" # {x-release-please-version} +__version__ = "1.11.1" # {x-release-please-version} diff --git a/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1/gapic_version.py b/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1/gapic_version.py index 50d842f376d0..b50cada0b7ee 100644 --- a/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1/gapic_version.py +++ b/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.11.0" # {x-release-please-version} +__version__ = "1.11.1" # {x-release-please-version} diff --git a/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1/services/binauthz_management_service_v1/client.py b/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1/services/binauthz_management_service_v1/client.py index 91142cfbeb1e..bdc3fbb0f4ef 100644 --- a/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1/services/binauthz_management_service_v1/client.py +++ b/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1/services/binauthz_management_service_v1/client.py @@ -488,36 +488,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = BinauthzManagementServiceV1Client._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. 
@@ -527,13 +497,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or BinauthzManagementServiceV1Client._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1/services/system_policy_v1/client.py b/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1/services/system_policy_v1/client.py index 592df00e4ec6..c1194731d1ea 100644 --- a/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1/services/system_policy_v1/client.py +++ b/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1/services/system_policy_v1/client.py @@ -455,36 +455,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. 
- """ - - default_universe = SystemPolicyV1Client._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -494,13 +464,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or SystemPolicyV1Client._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1/services/validation_helper_v1/client.py b/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1/services/validation_helper_v1/client.py index bf501c96eb59..3bbd7927dfe1 100644 --- a/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1/services/validation_helper_v1/client.py +++ b/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1/services/validation_helper_v1/client.py @@ -438,36 +438,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. 
- - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = ValidationHelperV1Client._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -477,13 +447,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or ValidationHelperV1Client._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. 
+ return True @property def api_endpoint(self): diff --git a/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1beta1/gapic_version.py b/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1beta1/gapic_version.py index 50d842f376d0..b50cada0b7ee 100644 --- a/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1beta1/gapic_version.py +++ b/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.11.0" # {x-release-please-version} +__version__ = "1.11.1" # {x-release-please-version} diff --git a/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1beta1/services/binauthz_management_service_v1_beta1/client.py b/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1beta1/services/binauthz_management_service_v1_beta1/client.py index a2e1b58e9735..9a7a80609bcc 100644 --- a/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1beta1/services/binauthz_management_service_v1_beta1/client.py +++ b/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1beta1/services/binauthz_management_service_v1_beta1/client.py @@ -491,36 +491,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. 
- - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = BinauthzManagementServiceV1Beta1Client._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -530,13 +500,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or BinauthzManagementServiceV1Beta1Client._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. 
+ return True @property def api_endpoint(self): diff --git a/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1beta1/services/system_policy_v1_beta1/client.py b/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1beta1/services/system_policy_v1_beta1/client.py index b4df4ff9e7e8..41cb4ed15afe 100644 --- a/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1beta1/services/system_policy_v1_beta1/client.py +++ b/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1beta1/services/system_policy_v1_beta1/client.py @@ -455,36 +455,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = SystemPolicyV1Beta1Client._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. 
@@ -494,13 +464,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or SystemPolicyV1Beta1Client._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-binary-authorization/samples/generated_samples/snippet_metadata_google.cloud.binaryauthorization.v1.json b/packages/google-cloud-binary-authorization/samples/generated_samples/snippet_metadata_google.cloud.binaryauthorization.v1.json index acf21ce3b6be..1c782ac5a11a 100644 --- a/packages/google-cloud-binary-authorization/samples/generated_samples/snippet_metadata_google.cloud.binaryauthorization.v1.json +++ b/packages/google-cloud-binary-authorization/samples/generated_samples/snippet_metadata_google.cloud.binaryauthorization.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-binary-authorization", - "version": "1.11.0" + "version": "1.11.1" }, "snippets": [ { diff --git a/packages/google-cloud-binary-authorization/samples/generated_samples/snippet_metadata_google.cloud.binaryauthorization.v1beta1.json b/packages/google-cloud-binary-authorization/samples/generated_samples/snippet_metadata_google.cloud.binaryauthorization.v1beta1.json index 481afb7cf4e2..076840734bfd 100644 --- a/packages/google-cloud-binary-authorization/samples/generated_samples/snippet_metadata_google.cloud.binaryauthorization.v1beta1.json +++ b/packages/google-cloud-binary-authorization/samples/generated_samples/snippet_metadata_google.cloud.binaryauthorization.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-binary-authorization", - "version": "1.11.0" + "version": "1.11.1" }, "snippets": [ { diff --git 
a/packages/google-cloud-binary-authorization/tests/unit/gapic/binaryauthorization_v1/test_binauthz_management_service_v1.py b/packages/google-cloud-binary-authorization/tests/unit/gapic/binaryauthorization_v1/test_binauthz_management_service_v1.py index d469281e147a..dbf9b366a305 100644 --- a/packages/google-cloud-binary-authorization/tests/unit/gapic/binaryauthorization_v1/test_binauthz_management_service_v1.py +++ b/packages/google-cloud-binary-authorization/tests/unit/gapic/binaryauthorization_v1/test_binauthz_management_service_v1.py @@ -344,94 +344,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - ( - BinauthzManagementServiceV1Client, - transports.BinauthzManagementServiceV1GrpcTransport, - "grpc", - ), - ( - BinauthzManagementServiceV1Client, - transports.BinauthzManagementServiceV1RestTransport, - "rest", - ), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. 
- channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-binary-authorization/tests/unit/gapic/binaryauthorization_v1/test_system_policy_v1.py b/packages/google-cloud-binary-authorization/tests/unit/gapic/binaryauthorization_v1/test_system_policy_v1.py index f3eb91b24edb..238de5153e22 100644 --- a/packages/google-cloud-binary-authorization/tests/unit/gapic/binaryauthorization_v1/test_system_policy_v1.py +++ b/packages/google-cloud-binary-authorization/tests/unit/gapic/binaryauthorization_v1/test_system_policy_v1.py @@ -318,86 +318,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
-@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (SystemPolicyV1Client, transports.SystemPolicyV1GrpcTransport, "grpc"), - (SystemPolicyV1Client, transports.SystemPolicyV1RestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. 
- google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
- ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-binary-authorization/tests/unit/gapic/binaryauthorization_v1/test_validation_helper_v1.py b/packages/google-cloud-binary-authorization/tests/unit/gapic/binaryauthorization_v1/test_validation_helper_v1.py index 3c849bed4b50..deef7b1a5d01 100644 --- a/packages/google-cloud-binary-authorization/tests/unit/gapic/binaryauthorization_v1/test_validation_helper_v1.py +++ b/packages/google-cloud-binary-authorization/tests/unit/gapic/binaryauthorization_v1/test_validation_helper_v1.py @@ -330,86 +330,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (ValidationHelperV1Client, transports.ValidationHelperV1GrpcTransport, "grpc"), - (ValidationHelperV1Client, transports.ValidationHelperV1RestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. 
a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-binary-authorization/tests/unit/gapic/binaryauthorization_v1beta1/test_binauthz_management_service_v1_beta1.py b/packages/google-cloud-binary-authorization/tests/unit/gapic/binaryauthorization_v1beta1/test_binauthz_management_service_v1_beta1.py index 286b7f65655a..69ad9bc3eb1e 100644 --- a/packages/google-cloud-binary-authorization/tests/unit/gapic/binaryauthorization_v1beta1/test_binauthz_management_service_v1_beta1.py +++ b/packages/google-cloud-binary-authorization/tests/unit/gapic/binaryauthorization_v1beta1/test_binauthz_management_service_v1_beta1.py @@ -355,94 +355,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
-@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - ( - BinauthzManagementServiceV1Beta1Client, - transports.BinauthzManagementServiceV1Beta1GrpcTransport, - "grpc", - ), - ( - BinauthzManagementServiceV1Beta1Client, - transports.BinauthzManagementServiceV1Beta1RestTransport, - "rest", - ), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. 
- google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
- ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-binary-authorization/tests/unit/gapic/binaryauthorization_v1beta1/test_system_policy_v1_beta1.py b/packages/google-cloud-binary-authorization/tests/unit/gapic/binaryauthorization_v1beta1/test_system_policy_v1_beta1.py index 4016f5595c10..a1f7defa50ae 100644 --- a/packages/google-cloud-binary-authorization/tests/unit/gapic/binaryauthorization_v1beta1/test_system_policy_v1_beta1.py +++ b/packages/google-cloud-binary-authorization/tests/unit/gapic/binaryauthorization_v1beta1/test_system_policy_v1_beta1.py @@ -333,94 +333,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - ( - SystemPolicyV1Beta1Client, - transports.SystemPolicyV1Beta1GrpcTransport, - "grpc", - ), - ( - SystemPolicyV1Beta1Client, - transports.SystemPolicyV1Beta1RestTransport, - "rest", - ), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. 
- channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-build/CHANGELOG.md b/packages/google-cloud-build/CHANGELOG.md index 5b50cddc2b10..bbcd9e10a661 100644 --- a/packages/google-cloud-build/CHANGELOG.md +++ b/packages/google-cloud-build/CHANGELOG.md @@ -4,6 +4,20 @@ [1]: https://pypi.org/project/google-cloud-build/#history +## [3.27.1](https://github.com/googleapis/google-cloud-python/compare/google-cloud-build-v3.27.0...google-cloud-build-v3.27.1) (2024-11-11) + + +### Bug Fixes + +* disable universe-domain validation ([#13242](https://github.com/googleapis/google-cloud-python/issues/13242)) ([b479ff8](https://github.com/googleapis/google-cloud-python/commit/b479ff841ed93a18393a188ee1d72edf9fb729ec)) + +## [3.27.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-build-v3.26.0...google-cloud-build-v3.27.0) (2024-10-28) + + +### Features + +* [google-cloud-build] Add PrivateServiceConnect option to WorkerPool 
([#13221](https://github.com/googleapis/google-cloud-python/issues/13221)) ([629b927](https://github.com/googleapis/google-cloud-python/commit/629b927a0ec0c3342a0d22a344b15afb41cf5e37)) + ## [3.26.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-build-v3.25.0...google-cloud-build-v3.26.0) (2024-10-24) diff --git a/packages/google-cloud-build/google/cloud/devtools/cloudbuild/gapic_version.py b/packages/google-cloud-build/google/cloud/devtools/cloudbuild/gapic_version.py index 1fe6836b543e..7af2fa694463 100644 --- a/packages/google-cloud-build/google/cloud/devtools/cloudbuild/gapic_version.py +++ b/packages/google-cloud-build/google/cloud/devtools/cloudbuild/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.26.0" # {x-release-please-version} +__version__ = "3.27.1" # {x-release-please-version} diff --git a/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/gapic_version.py b/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/gapic_version.py index 1fe6836b543e..7af2fa694463 100644 --- a/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/gapic_version.py +++ b/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "3.26.0" # {x-release-please-version} +__version__ = "3.27.1" # {x-release-please-version} diff --git a/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/services/cloud_build/async_client.py b/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/services/cloud_build/async_client.py index 9add55b5b6a1..d617fec2eb4a 100644 --- a/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/services/cloud_build/async_client.py +++ b/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/services/cloud_build/async_client.py @@ -91,6 +91,10 @@ class CloudBuildAsyncClient: ) network_path = staticmethod(CloudBuildClient.network_path) parse_network_path = staticmethod(CloudBuildClient.parse_network_path) + network_attachment_path = staticmethod(CloudBuildClient.network_attachment_path) + parse_network_attachment_path = staticmethod( + CloudBuildClient.parse_network_attachment_path + ) repository_path = staticmethod(CloudBuildClient.repository_path) parse_repository_path = staticmethod(CloudBuildClient.parse_repository_path) secret_version_path = staticmethod(CloudBuildClient.secret_version_path) diff --git a/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/services/cloud_build/client.py b/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/services/cloud_build/client.py index 2a69fbe66407..b851a7ccd636 100644 --- a/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/services/cloud_build/client.py +++ b/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/services/cloud_build/client.py @@ -292,6 +292,28 @@ def parse_network_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def network_attachment_path( + project: str, + region: str, + networkattachment: str, + ) -> str: + """Returns a fully-qualified network_attachment string.""" + return "projects/{project}/regions/{region}/networkAttachments/{networkattachment}".format( + 
project=project, + region=region, + networkattachment=networkattachment, + ) + + @staticmethod + def parse_network_attachment_path(path: str) -> Dict[str, str]: + """Parses a network_attachment path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/regions/(?P.+?)/networkAttachments/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def repository_path( project: str, @@ -674,36 +696,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = CloudBuildClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -713,13 +705,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. 
""" - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or CloudBuildClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/types/cloudbuild.py b/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/types/cloudbuild.py index 995ae202614c..f2c8afd90148 100644 --- a/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/types/cloudbuild.py +++ b/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/types/cloudbuild.py @@ -3702,6 +3702,9 @@ class PrivatePoolV1Config(proto.Message): pool. network_config (google.cloud.devtools.cloudbuild_v1.types.PrivatePoolV1Config.NetworkConfig): Network configuration for the pool. + private_service_connect (google.cloud.devtools.cloudbuild_v1.types.PrivatePoolV1Config.PrivateServiceConnect): + Immutable. Private Service Connect(PSC) + Network configuration for the pool. """ class WorkerConfig(proto.Message): @@ -3791,6 +3794,55 @@ class EgressOption(proto.Enum): number=3, ) + class PrivateServiceConnect(proto.Message): + r"""Defines the Private Service Connect network configuration for + the pool. + + Attributes: + network_attachment (str): + Required. Immutable. The network attachment that the worker + network interface is peered to. Must be in the format + ``projects/{project}/regions/{region}/networkAttachments/{networkAttachment}``. + The region of network attachment must be the same as the + worker pool. See `Network + Attachments `__ + public_ip_address_disabled (bool): + Required. Immutable. Disable public IP on the primary + network interface. + + If true, workers are created without any public address, + which prevents network egress to public IPs unless a network + proxy is configured. 
If false, workers are created with a + public address which allows for public internet egress. The + public address only applies to traffic through the primary + network interface. If ``route_all_traffic`` is set to true, + all traffic will go through the non-primary network + interface, this boolean has no effect. + route_all_traffic (bool): + Immutable. Route all traffic through PSC + interface. Enable this if you want full control + of traffic in the private pool. Configure Cloud + NAT for the subnet of network attachment if you + need to access public Internet. + + If false, Only route private IPs, e.g. + 10.0.0.0/8, 172.16.0.0/12, and 192.168.0.0/16 + through PSC interface. + """ + + network_attachment: str = proto.Field( + proto.STRING, + number=1, + ) + public_ip_address_disabled: bool = proto.Field( + proto.BOOL, + number=2, + ) + route_all_traffic: bool = proto.Field( + proto.BOOL, + number=3, + ) + worker_config: WorkerConfig = proto.Field( proto.MESSAGE, number=1, @@ -3801,6 +3853,11 @@ class EgressOption(proto.Enum): number=2, message=NetworkConfig, ) + private_service_connect: PrivateServiceConnect = proto.Field( + proto.MESSAGE, + number=5, + message=PrivateServiceConnect, + ) class CreateWorkerPoolRequest(proto.Message): diff --git a/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v2/gapic_version.py b/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v2/gapic_version.py index 1fe6836b543e..7af2fa694463 100644 --- a/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v2/gapic_version.py +++ b/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "3.26.0" # {x-release-please-version} +__version__ = "3.27.1" # {x-release-please-version} diff --git a/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v2/services/repository_manager/client.py b/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v2/services/repository_manager/client.py index e8f2725e4880..bd2250d96314 100644 --- a/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v2/services/repository_manager/client.py +++ b/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v2/services/repository_manager/client.py @@ -543,36 +543,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = RepositoryManagerClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. 
@@ -582,13 +552,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or RepositoryManagerClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-build/samples/generated_samples/snippet_metadata_google.devtools.cloudbuild.v1.json b/packages/google-cloud-build/samples/generated_samples/snippet_metadata_google.devtools.cloudbuild.v1.json index b56301bb509f..18e83f8ba807 100644 --- a/packages/google-cloud-build/samples/generated_samples/snippet_metadata_google.devtools.cloudbuild.v1.json +++ b/packages/google-cloud-build/samples/generated_samples/snippet_metadata_google.devtools.cloudbuild.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-build", - "version": "3.26.0" + "version": "3.27.1" }, "snippets": [ { diff --git a/packages/google-cloud-build/samples/generated_samples/snippet_metadata_google.devtools.cloudbuild.v2.json b/packages/google-cloud-build/samples/generated_samples/snippet_metadata_google.devtools.cloudbuild.v2.json index 94f2013739af..e71bcd97a913 100644 --- a/packages/google-cloud-build/samples/generated_samples/snippet_metadata_google.devtools.cloudbuild.v2.json +++ b/packages/google-cloud-build/samples/generated_samples/snippet_metadata_google.devtools.cloudbuild.v2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-build", - "version": "3.26.0" + "version": "3.27.1" }, "snippets": [ { diff --git a/packages/google-cloud-build/tests/unit/gapic/cloudbuild_v1/test_cloud_build.py b/packages/google-cloud-build/tests/unit/gapic/cloudbuild_v1/test_cloud_build.py index 7c46b84667ee..c03d84e29e05 100644 --- 
a/packages/google-cloud-build/tests/unit/gapic/cloudbuild_v1/test_cloud_build.py +++ b/packages/google-cloud-build/tests/unit/gapic/cloudbuild_v1/test_cloud_build.py @@ -311,86 +311,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (CloudBuildClient, transports.CloudBuildGrpcTransport, "grpc"), - (CloudBuildClient, transports.CloudBuildRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. 
- google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
- ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ @@ -15350,6 +15270,11 @@ def test_create_worker_pool_rest_call_success(request_type): "egress_option": 1, "peered_network_ip_range": "peered_network_ip_range_value", }, + "private_service_connect": { + "network_attachment": "network_attachment_value", + "public_ip_address_disabled": True, + "route_all_traffic": True, + }, }, "etag": "etag_value", } @@ -15802,6 +15727,11 @@ def test_update_worker_pool_rest_call_success(request_type): "egress_option": 1, "peered_network_ip_range": "peered_network_ip_range_value", }, + "private_service_connect": { + "network_attachment": "network_attachment_value", + "public_ip_address_disabled": True, + "route_all_traffic": True, + }, }, "etag": "etag_value", } @@ -17607,11 +17537,39 @@ def test_parse_network_path(): assert expected == actual -def test_repository_path(): +def test_network_attachment_path(): project = "squid" - location = "clam" - connection = "whelk" - repository = "octopus" + region = "clam" + networkattachment = "whelk" + expected = "projects/{project}/regions/{region}/networkAttachments/{networkattachment}".format( + project=project, + region=region, + networkattachment=networkattachment, + ) + actual = CloudBuildClient.network_attachment_path( + project, region, networkattachment + ) + assert expected == actual + + +def test_parse_network_attachment_path(): + expected = { + "project": "octopus", + "region": "oyster", + "networkattachment": "nudibranch", + } + path = CloudBuildClient.network_attachment_path(**expected) + + # Check that the path construction is reversible. 
+ actual = CloudBuildClient.parse_network_attachment_path(path) + assert expected == actual + + +def test_repository_path(): + project = "cuttlefish" + location = "mussel" + connection = "winkle" + repository = "nautilus" expected = "projects/{project}/locations/{location}/connections/{connection}/repositories/{repository}".format( project=project, location=location, @@ -17624,10 +17582,10 @@ def test_repository_path(): def test_parse_repository_path(): expected = { - "project": "oyster", - "location": "nudibranch", - "connection": "cuttlefish", - "repository": "mussel", + "project": "scallop", + "location": "abalone", + "connection": "squid", + "repository": "clam", } path = CloudBuildClient.repository_path(**expected) @@ -17637,9 +17595,9 @@ def test_parse_repository_path(): def test_secret_version_path(): - project = "winkle" - secret = "nautilus" - version = "scallop" + project = "whelk" + secret = "octopus" + version = "oyster" expected = "projects/{project}/secrets/{secret}/versions/{version}".format( project=project, secret=secret, @@ -17651,9 +17609,9 @@ def test_secret_version_path(): def test_parse_secret_version_path(): expected = { - "project": "abalone", - "secret": "squid", - "version": "clam", + "project": "nudibranch", + "secret": "cuttlefish", + "version": "mussel", } path = CloudBuildClient.secret_version_path(**expected) @@ -17663,8 +17621,8 @@ def test_parse_secret_version_path(): def test_service_account_path(): - project = "whelk" - service_account = "octopus" + project = "winkle" + service_account = "nautilus" expected = "projects/{project}/serviceAccounts/{service_account}".format( project=project, service_account=service_account, @@ -17675,8 +17633,8 @@ def test_service_account_path(): def test_parse_service_account_path(): expected = { - "project": "oyster", - "service_account": "nudibranch", + "project": "scallop", + "service_account": "abalone", } path = CloudBuildClient.service_account_path(**expected) @@ -17686,8 +17644,8 @@ def 
test_parse_service_account_path(): def test_subscription_path(): - project = "cuttlefish" - subscription = "mussel" + project = "squid" + subscription = "clam" expected = "projects/{project}/subscriptions/{subscription}".format( project=project, subscription=subscription, @@ -17698,8 +17656,8 @@ def test_subscription_path(): def test_parse_subscription_path(): expected = { - "project": "winkle", - "subscription": "nautilus", + "project": "whelk", + "subscription": "octopus", } path = CloudBuildClient.subscription_path(**expected) @@ -17709,8 +17667,8 @@ def test_parse_subscription_path(): def test_topic_path(): - project = "scallop" - topic = "abalone" + project = "oyster" + topic = "nudibranch" expected = "projects/{project}/topics/{topic}".format( project=project, topic=topic, @@ -17721,8 +17679,8 @@ def test_topic_path(): def test_parse_topic_path(): expected = { - "project": "squid", - "topic": "clam", + "project": "cuttlefish", + "topic": "mussel", } path = CloudBuildClient.topic_path(**expected) @@ -17732,9 +17690,9 @@ def test_parse_topic_path(): def test_worker_pool_path(): - project = "whelk" - location = "octopus" - worker_pool = "oyster" + project = "winkle" + location = "nautilus" + worker_pool = "scallop" expected = ( "projects/{project}/locations/{location}/workerPools/{worker_pool}".format( project=project, @@ -17748,9 +17706,9 @@ def test_worker_pool_path(): def test_parse_worker_pool_path(): expected = { - "project": "nudibranch", - "location": "cuttlefish", - "worker_pool": "mussel", + "project": "abalone", + "location": "squid", + "worker_pool": "clam", } path = CloudBuildClient.worker_pool_path(**expected) @@ -17760,7 +17718,7 @@ def test_parse_worker_pool_path(): def test_common_billing_account_path(): - billing_account = "winkle" + billing_account = "whelk" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -17770,7 +17728,7 @@ def test_common_billing_account_path(): def 
test_parse_common_billing_account_path(): expected = { - "billing_account": "nautilus", + "billing_account": "octopus", } path = CloudBuildClient.common_billing_account_path(**expected) @@ -17780,7 +17738,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): - folder = "scallop" + folder = "oyster" expected = "folders/{folder}".format( folder=folder, ) @@ -17790,7 +17748,7 @@ def test_common_folder_path(): def test_parse_common_folder_path(): expected = { - "folder": "abalone", + "folder": "nudibranch", } path = CloudBuildClient.common_folder_path(**expected) @@ -17800,7 +17758,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): - organization = "squid" + organization = "cuttlefish" expected = "organizations/{organization}".format( organization=organization, ) @@ -17810,7 +17768,7 @@ def test_common_organization_path(): def test_parse_common_organization_path(): expected = { - "organization": "clam", + "organization": "mussel", } path = CloudBuildClient.common_organization_path(**expected) @@ -17820,7 +17778,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): - project = "whelk" + project = "winkle" expected = "projects/{project}".format( project=project, ) @@ -17830,7 +17788,7 @@ def test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "octopus", + "project": "nautilus", } path = CloudBuildClient.common_project_path(**expected) @@ -17840,8 +17798,8 @@ def test_parse_common_project_path(): def test_common_location_path(): - project = "oyster" - location = "nudibranch" + project = "scallop" + location = "abalone" expected = "projects/{project}/locations/{location}".format( project=project, location=location, @@ -17852,8 +17810,8 @@ def test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": "cuttlefish", - "location": "mussel", + "project": "squid", + "location": "clam", } path = 
CloudBuildClient.common_location_path(**expected) diff --git a/packages/google-cloud-build/tests/unit/gapic/cloudbuild_v2/test_repository_manager.py b/packages/google-cloud-build/tests/unit/gapic/cloudbuild_v2/test_repository_manager.py index f9069b00a3dd..5be3cfbec46e 100644 --- a/packages/google-cloud-build/tests/unit/gapic/cloudbuild_v2/test_repository_manager.py +++ b/packages/google-cloud-build/tests/unit/gapic/cloudbuild_v2/test_repository_manager.py @@ -344,86 +344,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (RepositoryManagerClient, transports.RepositoryManagerGrpcTransport, "grpc"), - (RepositoryManagerClient, transports.RepositoryManagerRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. 
- channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-certificate-manager/CHANGELOG.md b/packages/google-cloud-certificate-manager/CHANGELOG.md index 6e0c93ab449e..988dba41a365 100644 --- a/packages/google-cloud-certificate-manager/CHANGELOG.md +++ b/packages/google-cloud-certificate-manager/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [1.8.1](https://github.com/googleapis/google-cloud-python/compare/google-cloud-certificate-manager-v1.8.0...google-cloud-certificate-manager-v1.8.1) (2024-10-31) + + +### Bug Fixes + +* disable universe-domain validation ([53c951e](https://github.com/googleapis/google-cloud-python/commit/53c951e90ad1d702fa507495532086d5d2f6b3c0)) + ## [1.8.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-certificate-manager-v1.7.2...google-cloud-certificate-manager-v1.8.0) (2024-10-24) diff --git a/packages/google-cloud-certificate-manager/google/cloud/certificate_manager/gapic_version.py 
b/packages/google-cloud-certificate-manager/google/cloud/certificate_manager/gapic_version.py index 4b114d153974..02874f69f4e5 100644 --- a/packages/google-cloud-certificate-manager/google/cloud/certificate_manager/gapic_version.py +++ b/packages/google-cloud-certificate-manager/google/cloud/certificate_manager/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.8.0" # {x-release-please-version} +__version__ = "1.8.1" # {x-release-please-version} diff --git a/packages/google-cloud-certificate-manager/google/cloud/certificate_manager_v1/gapic_version.py b/packages/google-cloud-certificate-manager/google/cloud/certificate_manager_v1/gapic_version.py index 4b114d153974..02874f69f4e5 100644 --- a/packages/google-cloud-certificate-manager/google/cloud/certificate_manager_v1/gapic_version.py +++ b/packages/google-cloud-certificate-manager/google/cloud/certificate_manager_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.8.0" # {x-release-please-version} +__version__ = "1.8.1" # {x-release-please-version} diff --git a/packages/google-cloud-certificate-manager/google/cloud/certificate_manager_v1/services/certificate_manager/client.py b/packages/google-cloud-certificate-manager/google/cloud/certificate_manager_v1/services/certificate_manager/client.py index 0fb3f8ee90a1..aee840ef5ff6 100644 --- a/packages/google-cloud-certificate-manager/google/cloud/certificate_manager_v1/services/certificate_manager/client.py +++ b/packages/google-cloud-certificate-manager/google/cloud/certificate_manager_v1/services/certificate_manager/client.py @@ -640,36 +640,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = CertificateManagerClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. 
@@ -679,13 +649,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or CertificateManagerClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-certificate-manager/samples/generated_samples/snippet_metadata_google.cloud.certificatemanager.v1.json b/packages/google-cloud-certificate-manager/samples/generated_samples/snippet_metadata_google.cloud.certificatemanager.v1.json index 89c2ca3b5ca6..913377237d4f 100644 --- a/packages/google-cloud-certificate-manager/samples/generated_samples/snippet_metadata_google.cloud.certificatemanager.v1.json +++ b/packages/google-cloud-certificate-manager/samples/generated_samples/snippet_metadata_google.cloud.certificatemanager.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-certificate-manager", - "version": "1.8.0" + "version": "1.8.1" }, "snippets": [ { diff --git a/packages/google-cloud-certificate-manager/tests/unit/gapic/certificate_manager_v1/test_certificate_manager.py b/packages/google-cloud-certificate-manager/tests/unit/gapic/certificate_manager_v1/test_certificate_manager.py index 37943cacd1dc..c674b586a135 100644 --- a/packages/google-cloud-certificate-manager/tests/unit/gapic/certificate_manager_v1/test_certificate_manager.py +++ b/packages/google-cloud-certificate-manager/tests/unit/gapic/certificate_manager_v1/test_certificate_manager.py @@ -352,86 +352,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
-@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (CertificateManagerClient, transports.CertificateManagerGrpcTransport, "grpc"), - (CertificateManagerClient, transports.CertificateManagerRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. 
- google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
- ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-channel/CHANGELOG.md b/packages/google-cloud-channel/CHANGELOG.md index 172bc3455928..06508e42eecb 100644 --- a/packages/google-cloud-channel/CHANGELOG.md +++ b/packages/google-cloud-channel/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [1.20.1](https://github.com/googleapis/google-cloud-python/compare/google-cloud-channel-v1.20.0...google-cloud-channel-v1.20.1) (2024-10-31) + + +### Bug Fixes + +* disable universe-domain validation ([53c951e](https://github.com/googleapis/google-cloud-python/commit/53c951e90ad1d702fa507495532086d5d2f6b3c0)) + ## [1.20.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-channel-v1.19.0...google-cloud-channel-v1.20.0) (2024-10-24) diff --git a/packages/google-cloud-channel/google/cloud/channel/gapic_version.py b/packages/google-cloud-channel/google/cloud/channel/gapic_version.py index c8ba2b4c6a4f..4da8c821edb1 100644 --- a/packages/google-cloud-channel/google/cloud/channel/gapic_version.py +++ b/packages/google-cloud-channel/google/cloud/channel/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.20.0" # {x-release-please-version} +__version__ = "1.20.1" # {x-release-please-version} diff --git a/packages/google-cloud-channel/google/cloud/channel_v1/gapic_version.py b/packages/google-cloud-channel/google/cloud/channel_v1/gapic_version.py index c8ba2b4c6a4f..4da8c821edb1 100644 --- a/packages/google-cloud-channel/google/cloud/channel_v1/gapic_version.py +++ b/packages/google-cloud-channel/google/cloud/channel_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.20.0" # {x-release-please-version} +__version__ = "1.20.1" # {x-release-please-version} diff --git a/packages/google-cloud-channel/google/cloud/channel_v1/services/cloud_channel_reports_service/client.py b/packages/google-cloud-channel/google/cloud/channel_v1/services/cloud_channel_reports_service/client.py index 7b838548916c..433e0999274b 100644 --- a/packages/google-cloud-channel/google/cloud/channel_v1/services/cloud_channel_reports_service/client.py +++ b/packages/google-cloud-channel/google/cloud/channel_v1/services/cloud_channel_reports_service/client.py @@ -488,36 +488,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. 
- """ - - default_universe = CloudChannelReportsServiceClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -527,13 +497,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or CloudChannelReportsServiceClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-channel/google/cloud/channel_v1/services/cloud_channel_service/client.py b/packages/google-cloud-channel/google/cloud/channel_v1/services/cloud_channel_service/client.py index 50bf88c61dba..65efe3abed74 100644 --- a/packages/google-cloud-channel/google/cloud/channel_v1/services/cloud_channel_service/client.py +++ b/packages/google-cloud-channel/google/cloud/channel_v1/services/cloud_channel_service/client.py @@ -669,36 +669,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. 
- credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = CloudChannelServiceClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -708,13 +678,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or CloudChannelServiceClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. 
+ return True @property def api_endpoint(self): diff --git a/packages/google-cloud-channel/samples/generated_samples/snippet_metadata_google.cloud.channel.v1.json b/packages/google-cloud-channel/samples/generated_samples/snippet_metadata_google.cloud.channel.v1.json index 596e081cf9c6..246e3a4a4d85 100644 --- a/packages/google-cloud-channel/samples/generated_samples/snippet_metadata_google.cloud.channel.v1.json +++ b/packages/google-cloud-channel/samples/generated_samples/snippet_metadata_google.cloud.channel.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-channel", - "version": "1.20.0" + "version": "1.20.1" }, "snippets": [ { diff --git a/packages/google-cloud-channel/tests/unit/gapic/channel_v1/test_cloud_channel_reports_service.py b/packages/google-cloud-channel/tests/unit/gapic/channel_v1/test_cloud_channel_reports_service.py index 666777b16107..e9c6f7cf56a8 100644 --- a/packages/google-cloud-channel/tests/unit/gapic/channel_v1/test_cloud_channel_reports_service.py +++ b/packages/google-cloud-channel/tests/unit/gapic/channel_v1/test_cloud_channel_reports_service.py @@ -347,89 +347,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - ( - CloudChannelReportsServiceClient, - transports.CloudChannelReportsServiceGrpcTransport, - "grpc", - ), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. 
- channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-channel/tests/unit/gapic/channel_v1/test_cloud_channel_service.py b/packages/google-cloud-channel/tests/unit/gapic/channel_v1/test_cloud_channel_service.py index 16d2ef018171..765870dc1078 100644 --- a/packages/google-cloud-channel/tests/unit/gapic/channel_v1/test_cloud_channel_service.py +++ b/packages/google-cloud-channel/tests/unit/gapic/channel_v1/test_cloud_channel_service.py @@ -356,89 +356,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - ( - CloudChannelServiceClient, - transports.CloudChannelServiceGrpcTransport, - "grpc", - ), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. 
- assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. 
- # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-cloudcontrolspartner/CHANGELOG.md b/packages/google-cloud-cloudcontrolspartner/CHANGELOG.md index 6bfc0f7a5bb6..44bbd336a68d 100644 --- a/packages/google-cloud-cloudcontrolspartner/CHANGELOG.md +++ b/packages/google-cloud-cloudcontrolspartner/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [0.2.2](https://github.com/googleapis/google-cloud-python/compare/google-cloud-cloudcontrolspartner-v0.2.1...google-cloud-cloudcontrolspartner-v0.2.2) (2024-10-31) + + +### Bug Fixes + +* disable universe-domain validation ([53c951e](https://github.com/googleapis/google-cloud-python/commit/53c951e90ad1d702fa507495532086d5d2f6b3c0)) + ## [0.2.1](https://github.com/googleapis/google-cloud-python/compare/google-cloud-cloudcontrolspartner-v0.2.0...google-cloud-cloudcontrolspartner-v0.2.1) (2024-10-24) diff --git 
a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner/gapic_version.py b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner/gapic_version.py index 6d8247638d59..d1a1a883babd 100644 --- a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner/gapic_version.py +++ b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.2.1" # {x-release-please-version} +__version__ = "0.2.2" # {x-release-please-version} diff --git a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/gapic_version.py b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/gapic_version.py index 6d8247638d59..d1a1a883babd 100644 --- a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/gapic_version.py +++ b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.2.1" # {x-release-please-version} +__version__ = "0.2.2" # {x-release-please-version} diff --git a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/services/cloud_controls_partner_core/client.py b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/services/cloud_controls_partner_core/client.py index 82b86d2f3332..a3781f75beae 100644 --- a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/services/cloud_controls_partner_core/client.py +++ b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/services/cloud_controls_partner_core/client.py @@ -592,36 +592,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = CloudControlsPartnerCoreClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. 
@@ -631,13 +601,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or CloudControlsPartnerCoreClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/services/cloud_controls_partner_monitoring/client.py b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/services/cloud_controls_partner_monitoring/client.py index 0f7d9f952054..1cd8567abb42 100644 --- a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/services/cloud_controls_partner_monitoring/client.py +++ b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/services/cloud_controls_partner_monitoring/client.py @@ -478,36 +478,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. 
- """ - - default_universe = CloudControlsPartnerMonitoringClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -517,13 +487,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or CloudControlsPartnerMonitoringClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/gapic_version.py b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/gapic_version.py index 6d8247638d59..d1a1a883babd 100644 --- a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/gapic_version.py +++ b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.2.1" # {x-release-please-version} +__version__ = "0.2.2" # {x-release-please-version} diff --git a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/services/cloud_controls_partner_core/client.py b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/services/cloud_controls_partner_core/client.py index 1896fa7459b7..48c0714b9751 100644 --- a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/services/cloud_controls_partner_core/client.py +++ b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/services/cloud_controls_partner_core/client.py @@ -592,36 +592,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = CloudControlsPartnerCoreClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." 
- ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -631,13 +601,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or CloudControlsPartnerCoreClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/services/cloud_controls_partner_monitoring/client.py b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/services/cloud_controls_partner_monitoring/client.py index edde6d0911ee..9c5de81b1d9e 100644 --- a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/services/cloud_controls_partner_monitoring/client.py +++ b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/services/cloud_controls_partner_monitoring/client.py @@ -478,36 +478,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. 
- """ - - default_universe = CloudControlsPartnerMonitoringClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -517,13 +487,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or CloudControlsPartnerMonitoringClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. 
+ return True @property def api_endpoint(self): diff --git a/packages/google-cloud-cloudcontrolspartner/samples/generated_samples/snippet_metadata_google.cloud.cloudcontrolspartner.v1.json b/packages/google-cloud-cloudcontrolspartner/samples/generated_samples/snippet_metadata_google.cloud.cloudcontrolspartner.v1.json index 220f0f650c42..392fc5204423 100644 --- a/packages/google-cloud-cloudcontrolspartner/samples/generated_samples/snippet_metadata_google.cloud.cloudcontrolspartner.v1.json +++ b/packages/google-cloud-cloudcontrolspartner/samples/generated_samples/snippet_metadata_google.cloud.cloudcontrolspartner.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-cloudcontrolspartner", - "version": "0.2.1" + "version": "0.2.2" }, "snippets": [ { diff --git a/packages/google-cloud-cloudcontrolspartner/samples/generated_samples/snippet_metadata_google.cloud.cloudcontrolspartner.v1beta.json b/packages/google-cloud-cloudcontrolspartner/samples/generated_samples/snippet_metadata_google.cloud.cloudcontrolspartner.v1beta.json index 33dd6a359fe1..d9b197cc0830 100644 --- a/packages/google-cloud-cloudcontrolspartner/samples/generated_samples/snippet_metadata_google.cloud.cloudcontrolspartner.v1beta.json +++ b/packages/google-cloud-cloudcontrolspartner/samples/generated_samples/snippet_metadata_google.cloud.cloudcontrolspartner.v1beta.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-cloudcontrolspartner", - "version": "0.2.1" + "version": "0.2.2" }, "snippets": [ { diff --git a/packages/google-cloud-cloudcontrolspartner/tests/unit/gapic/cloudcontrolspartner_v1/test_cloud_controls_partner_core.py b/packages/google-cloud-cloudcontrolspartner/tests/unit/gapic/cloudcontrolspartner_v1/test_cloud_controls_partner_core.py index 400ce936680a..f9919a53eb72 100644 --- a/packages/google-cloud-cloudcontrolspartner/tests/unit/gapic/cloudcontrolspartner_v1/test_cloud_controls_partner_core.py +++ 
b/packages/google-cloud-cloudcontrolspartner/tests/unit/gapic/cloudcontrolspartner_v1/test_cloud_controls_partner_core.py @@ -343,94 +343,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - ( - CloudControlsPartnerCoreClient, - transports.CloudControlsPartnerCoreGrpcTransport, - "grpc", - ), - ( - CloudControlsPartnerCoreClient, - transports.CloudControlsPartnerCoreRestTransport, - "rest", - ), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. 
- google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
- ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-cloudcontrolspartner/tests/unit/gapic/cloudcontrolspartner_v1/test_cloud_controls_partner_monitoring.py b/packages/google-cloud-cloudcontrolspartner/tests/unit/gapic/cloudcontrolspartner_v1/test_cloud_controls_partner_monitoring.py index 378cd65cbe72..843ca40fda26 100644 --- a/packages/google-cloud-cloudcontrolspartner/tests/unit/gapic/cloudcontrolspartner_v1/test_cloud_controls_partner_monitoring.py +++ b/packages/google-cloud-cloudcontrolspartner/tests/unit/gapic/cloudcontrolspartner_v1/test_cloud_controls_partner_monitoring.py @@ -352,94 +352,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - ( - CloudControlsPartnerMonitoringClient, - transports.CloudControlsPartnerMonitoringGrpcTransport, - "grpc", - ), - ( - CloudControlsPartnerMonitoringClient, - transports.CloudControlsPartnerMonitoringRestTransport, - "rest", - ), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. 
- channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-cloudcontrolspartner/tests/unit/gapic/cloudcontrolspartner_v1beta/test_cloud_controls_partner_core.py b/packages/google-cloud-cloudcontrolspartner/tests/unit/gapic/cloudcontrolspartner_v1beta/test_cloud_controls_partner_core.py index 34cc60cc0b8f..554f9c30d42d 100644 --- a/packages/google-cloud-cloudcontrolspartner/tests/unit/gapic/cloudcontrolspartner_v1beta/test_cloud_controls_partner_core.py +++ b/packages/google-cloud-cloudcontrolspartner/tests/unit/gapic/cloudcontrolspartner_v1beta/test_cloud_controls_partner_core.py @@ -343,94 +343,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
-@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - ( - CloudControlsPartnerCoreClient, - transports.CloudControlsPartnerCoreGrpcTransport, - "grpc", - ), - ( - CloudControlsPartnerCoreClient, - transports.CloudControlsPartnerCoreRestTransport, - "rest", - ), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. 
- google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
- ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-cloudcontrolspartner/tests/unit/gapic/cloudcontrolspartner_v1beta/test_cloud_controls_partner_monitoring.py b/packages/google-cloud-cloudcontrolspartner/tests/unit/gapic/cloudcontrolspartner_v1beta/test_cloud_controls_partner_monitoring.py index 2c6f413e9378..5eb418b32ef6 100644 --- a/packages/google-cloud-cloudcontrolspartner/tests/unit/gapic/cloudcontrolspartner_v1beta/test_cloud_controls_partner_monitoring.py +++ b/packages/google-cloud-cloudcontrolspartner/tests/unit/gapic/cloudcontrolspartner_v1beta/test_cloud_controls_partner_monitoring.py @@ -352,94 +352,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - ( - CloudControlsPartnerMonitoringClient, - transports.CloudControlsPartnerMonitoringGrpcTransport, - "grpc", - ), - ( - CloudControlsPartnerMonitoringClient, - transports.CloudControlsPartnerMonitoringRestTransport, - "rest", - ), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. 
- channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-commerce-consumer-procurement/CHANGELOG.md b/packages/google-cloud-commerce-consumer-procurement/CHANGELOG.md index a3a9ec2ec011..877079e395ba 100644 --- a/packages/google-cloud-commerce-consumer-procurement/CHANGELOG.md +++ b/packages/google-cloud-commerce-consumer-procurement/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [0.1.10](https://github.com/googleapis/google-cloud-python/compare/google-cloud-commerce-consumer-procurement-v0.1.9...google-cloud-commerce-consumer-procurement-v0.1.10) (2024-10-31) + + +### Bug Fixes + +* disable universe-domain validation ([53c951e](https://github.com/googleapis/google-cloud-python/commit/53c951e90ad1d702fa507495532086d5d2f6b3c0)) + ## [0.1.9](https://github.com/googleapis/google-cloud-python/compare/google-cloud-commerce-consumer-procurement-v0.1.8...google-cloud-commerce-consumer-procurement-v0.1.9) (2024-10-24) diff --git 
a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement/gapic_version.py b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement/gapic_version.py index f8ea948a9c30..9413c3341313 100644 --- a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement/gapic_version.py +++ b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.9" # {x-release-please-version} +__version__ = "0.1.10" # {x-release-please-version} diff --git a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/gapic_version.py b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/gapic_version.py index f8ea948a9c30..9413c3341313 100644 --- a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/gapic_version.py +++ b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.1.9" # {x-release-please-version} +__version__ = "0.1.10" # {x-release-please-version} diff --git a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/consumer_procurement_service/client.py b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/consumer_procurement_service/client.py index 119c0adfc5cc..cf83af1f0889 100644 --- a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/consumer_procurement_service/client.py +++ b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/consumer_procurement_service/client.py @@ -497,36 +497,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = ConsumerProcurementServiceClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." 
- ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -536,13 +506,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or ConsumerProcurementServiceClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/license_management_service/client.py b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/license_management_service/client.py index 9aa5a85da06f..a943fc3ff392 100644 --- a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/license_management_service/client.py +++ b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/license_management_service/client.py @@ -470,36 +470,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. 
- """ - - default_universe = LicenseManagementServiceClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -509,13 +479,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or LicenseManagementServiceClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1alpha1/gapic_version.py b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1alpha1/gapic_version.py index f8ea948a9c30..9413c3341313 100644 --- a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1alpha1/gapic_version.py +++ b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1alpha1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.1.9" # {x-release-please-version} +__version__ = "0.1.10" # {x-release-please-version} diff --git a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1alpha1/services/consumer_procurement_service/client.py b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1alpha1/services/consumer_procurement_service/client.py index df680ceecc3a..0d17b8a8a05a 100644 --- a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1alpha1/services/consumer_procurement_service/client.py +++ b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1alpha1/services/consumer_procurement_service/client.py @@ -497,36 +497,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = ConsumerProcurementServiceClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." 
- ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -536,13 +506,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or ConsumerProcurementServiceClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/snippet_metadata_google.cloud.commerce.consumer.procurement.v1.json b/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/snippet_metadata_google.cloud.commerce.consumer.procurement.v1.json index 78c1a769e8f3..bf50eef1c644 100644 --- a/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/snippet_metadata_google.cloud.commerce.consumer.procurement.v1.json +++ b/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/snippet_metadata_google.cloud.commerce.consumer.procurement.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-commerce-consumer-procurement", - "version": "0.1.9" + "version": "0.1.10" }, "snippets": [ { diff --git a/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/snippet_metadata_google.cloud.commerce.consumer.procurement.v1alpha1.json b/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/snippet_metadata_google.cloud.commerce.consumer.procurement.v1alpha1.json index 1fcc5668de22..0bc5add2d4eb 100644 --- a/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/snippet_metadata_google.cloud.commerce.consumer.procurement.v1alpha1.json +++ 
b/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/snippet_metadata_google.cloud.commerce.consumer.procurement.v1alpha1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-commerce-consumer-procurement", - "version": "0.1.9" + "version": "0.1.10" }, "snippets": [ { diff --git a/packages/google-cloud-commerce-consumer-procurement/tests/unit/gapic/commerce_consumer_procurement_v1/test_consumer_procurement_service.py b/packages/google-cloud-commerce-consumer-procurement/tests/unit/gapic/commerce_consumer_procurement_v1/test_consumer_procurement_service.py index 59b5252ad41d..ccb358a69e42 100644 --- a/packages/google-cloud-commerce-consumer-procurement/tests/unit/gapic/commerce_consumer_procurement_v1/test_consumer_procurement_service.py +++ b/packages/google-cloud-commerce-consumer-procurement/tests/unit/gapic/commerce_consumer_procurement_v1/test_consumer_procurement_service.py @@ -353,94 +353,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - ( - ConsumerProcurementServiceClient, - transports.ConsumerProcurementServiceGrpcTransport, - "grpc", - ), - ( - ConsumerProcurementServiceClient, - transports.ConsumerProcurementServiceRestTransport, - "rest", - ), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. 
- channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-commerce-consumer-procurement/tests/unit/gapic/commerce_consumer_procurement_v1/test_license_management_service.py b/packages/google-cloud-commerce-consumer-procurement/tests/unit/gapic/commerce_consumer_procurement_v1/test_license_management_service.py index ace34dbae51f..c6d289359a67 100644 --- a/packages/google-cloud-commerce-consumer-procurement/tests/unit/gapic/commerce_consumer_procurement_v1/test_license_management_service.py +++ b/packages/google-cloud-commerce-consumer-procurement/tests/unit/gapic/commerce_consumer_procurement_v1/test_license_management_service.py @@ -340,94 +340,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
-@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - ( - LicenseManagementServiceClient, - transports.LicenseManagementServiceGrpcTransport, - "grpc", - ), - ( - LicenseManagementServiceClient, - transports.LicenseManagementServiceRestTransport, - "rest", - ), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. 
- google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
- ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-commerce-consumer-procurement/tests/unit/gapic/commerce_consumer_procurement_v1alpha1/test_consumer_procurement_service.py b/packages/google-cloud-commerce-consumer-procurement/tests/unit/gapic/commerce_consumer_procurement_v1alpha1/test_consumer_procurement_service.py index dc9d5e178cbc..9b14a7739474 100644 --- a/packages/google-cloud-commerce-consumer-procurement/tests/unit/gapic/commerce_consumer_procurement_v1alpha1/test_consumer_procurement_service.py +++ b/packages/google-cloud-commerce-consumer-procurement/tests/unit/gapic/commerce_consumer_procurement_v1alpha1/test_consumer_procurement_service.py @@ -353,94 +353,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - ( - ConsumerProcurementServiceClient, - transports.ConsumerProcurementServiceGrpcTransport, - "grpc", - ), - ( - ConsumerProcurementServiceClient, - transports.ConsumerProcurementServiceRestTransport, - "rest", - ), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. 
- channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-compute/CHANGELOG.md b/packages/google-cloud-compute/CHANGELOG.md index be45071a6608..58d7ba36ca6d 100644 --- a/packages/google-cloud-compute/CHANGELOG.md +++ b/packages/google-cloud-compute/CHANGELOG.md @@ -1,5 +1,19 @@ # Changelog +## [1.21.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-compute-v1.20.1...google-cloud-compute-v1.21.0) (2024-11-11) + + +### Features + +* Update Compute Engine API to revision 20241015 ([#13252](https://github.com/googleapis/google-cloud-python/issues/13252)) ([c0351dd](https://github.com/googleapis/google-cloud-python/commit/c0351dd6a6726bca6c0eb6e995d7889f3956e03b)) + +## [1.20.1](https://github.com/googleapis/google-cloud-python/compare/google-cloud-compute-v1.20.0...google-cloud-compute-v1.20.1) (2024-10-31) + + +### Bug Fixes + +* disable universe-domain validation 
([53c951e](https://github.com/googleapis/google-cloud-python/commit/53c951e90ad1d702fa507495532086d5d2f6b3c0)) + ## [1.20.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-compute-v1.19.2...google-cloud-compute-v1.20.0) (2024-10-24) diff --git a/packages/google-cloud-compute/google/cloud/compute/__init__.py b/packages/google-cloud-compute/google/cloud/compute/__init__.py index 0a481c591240..d73b4a051359 100644 --- a/packages/google-cloud-compute/google/cloud/compute/__init__.py +++ b/packages/google-cloud-compute/google/cloud/compute/__init__.py @@ -338,7 +338,6 @@ AttachNetworkEndpointsRegionNetworkEndpointGroupRequest, AuditConfig, AuditLogConfig, - AuthorizationLoggingOptions, Autoscaler, AutoscalerAggregatedList, AutoscalerList, @@ -358,6 +357,7 @@ BackendBucketCdnPolicyCacheKeyPolicy, BackendBucketCdnPolicyNegativeCachingPolicy, BackendBucketList, + BackendBucketUsedBy, BackendService, BackendServiceAggregatedList, BackendServiceCdnPolicy, @@ -366,6 +366,7 @@ BackendServiceConnectionTrackingPolicy, BackendServiceFailoverPolicy, BackendServiceGroupHealth, + BackendServiceHttpCookie, BackendServiceIAP, BackendServiceList, BackendServiceListUsable, @@ -398,8 +399,8 @@ Commitment, CommitmentAggregatedList, CommitmentList, + CommitmentResourceStatus, CommitmentsScopedList, - Condition, ConfidentialInstanceConfig, ConnectionDraining, ConsistentHashLoadBalancerSettings, @@ -411,6 +412,8 @@ CreateSnapshotRegionDiskRequest, CustomerEncryptionKey, CustomerEncryptionKeyProtectedDisk, + CustomErrorResponsePolicy, + CustomErrorResponsePolicyCustomErrorResponseRule, Data, DeleteAccessConfigInstanceRequest, DeleteAddressRequest, @@ -847,6 +850,8 @@ InstanceGroupManagerAggregatedList, InstanceGroupManagerAllInstancesConfig, InstanceGroupManagerAutoHealingPolicy, + InstanceGroupManagerInstanceFlexibilityPolicy, + InstanceGroupManagerInstanceFlexibilityPolicyInstanceSelection, 
InstanceGroupManagerInstanceLifecyclePolicy, InstanceGroupManagerList, InstanceGroupManagerResizeRequest, @@ -1076,11 +1081,6 @@ LocationPolicy, LocationPolicyLocation, LocationPolicyLocationConstraints, - LogConfig, - LogConfigCloudAuditOptions, - LogConfigCounterOptions, - LogConfigCounterOptionsCustomField, - LogConfigDataAccessOptions, MachineImage, MachineImageList, MachineType, @@ -1090,6 +1090,7 @@ ManagedInstance, ManagedInstanceInstanceHealth, ManagedInstanceLastAttempt, + ManagedInstancePropertiesFromFlexibilityPolicy, ManagedInstanceVersion, Metadata, MetadataFilter, @@ -1345,6 +1346,7 @@ ResourcePolicyWeeklyCycle, ResourcePolicyWeeklyCycleDayOfWeek, ResourceStatus, + ResourceStatusScheduling, ResumeInstanceRequest, Route, RouteAsPath, @@ -1371,12 +1373,12 @@ RouterStatusNatStatus, RouterStatusNatStatusNatRuleStatus, RouterStatusResponse, - Rule, SavedAttachedDisk, SavedDisk, ScalingScheduleStatus, Scheduling, SchedulingNodeAffinity, + SchedulingOnInstanceStopAction, ScratchDisks, Screenshot, SecurityPoliciesAggregatedList, @@ -1387,6 +1389,7 @@ SecurityPolicyAdaptiveProtectionConfig, SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfig, SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfigThresholdConfig, + SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfigThresholdConfigTrafficGranularityConfig, SecurityPolicyAdvancedOptionsConfig, SecurityPolicyAdvancedOptionsConfigJsonCustomConfig, SecurityPolicyDdosProtectionConfig, @@ -1935,7 +1938,6 @@ "AttachNetworkEndpointsRegionNetworkEndpointGroupRequest", "AuditConfig", "AuditLogConfig", - "AuthorizationLoggingOptions", "Autoscaler", "AutoscalerAggregatedList", "AutoscalerList", @@ -1955,6 +1957,7 @@ "BackendBucketCdnPolicyCacheKeyPolicy", "BackendBucketCdnPolicyNegativeCachingPolicy", "BackendBucketList", + "BackendBucketUsedBy", "BackendService", "BackendServiceAggregatedList", "BackendServiceCdnPolicy", @@ -1963,6 +1966,7 @@ "BackendServiceConnectionTrackingPolicy", 
"BackendServiceFailoverPolicy", "BackendServiceGroupHealth", + "BackendServiceHttpCookie", "BackendServiceIAP", "BackendServiceList", "BackendServiceListUsable", @@ -1995,8 +1999,8 @@ "Commitment", "CommitmentAggregatedList", "CommitmentList", + "CommitmentResourceStatus", "CommitmentsScopedList", - "Condition", "ConfidentialInstanceConfig", "ConnectionDraining", "ConsistentHashLoadBalancerSettings", @@ -2008,6 +2012,8 @@ "CreateSnapshotRegionDiskRequest", "CustomerEncryptionKey", "CustomerEncryptionKeyProtectedDisk", + "CustomErrorResponsePolicy", + "CustomErrorResponsePolicyCustomErrorResponseRule", "Data", "DeleteAccessConfigInstanceRequest", "DeleteAddressRequest", @@ -2444,6 +2450,8 @@ "InstanceGroupManagerAggregatedList", "InstanceGroupManagerAllInstancesConfig", "InstanceGroupManagerAutoHealingPolicy", + "InstanceGroupManagerInstanceFlexibilityPolicy", + "InstanceGroupManagerInstanceFlexibilityPolicyInstanceSelection", "InstanceGroupManagerInstanceLifecyclePolicy", "InstanceGroupManagerList", "InstanceGroupManagerResizeRequest", @@ -2673,11 +2681,6 @@ "LocationPolicy", "LocationPolicyLocation", "LocationPolicyLocationConstraints", - "LogConfig", - "LogConfigCloudAuditOptions", - "LogConfigCounterOptions", - "LogConfigCounterOptionsCustomField", - "LogConfigDataAccessOptions", "MachineImage", "MachineImageList", "MachineType", @@ -2687,6 +2690,7 @@ "ManagedInstance", "ManagedInstanceInstanceHealth", "ManagedInstanceLastAttempt", + "ManagedInstancePropertiesFromFlexibilityPolicy", "ManagedInstanceVersion", "Metadata", "MetadataFilter", @@ -2942,6 +2946,7 @@ "ResourcePolicyWeeklyCycle", "ResourcePolicyWeeklyCycleDayOfWeek", "ResourceStatus", + "ResourceStatusScheduling", "ResumeInstanceRequest", "Route", "RouteAsPath", @@ -2968,12 +2973,12 @@ "RouterStatusNatStatus", "RouterStatusNatStatusNatRuleStatus", "RouterStatusResponse", - "Rule", "SavedAttachedDisk", "SavedDisk", "ScalingScheduleStatus", "Scheduling", "SchedulingNodeAffinity", + 
"SchedulingOnInstanceStopAction", "ScratchDisks", "Screenshot", "SecurityPoliciesAggregatedList", @@ -2984,6 +2989,7 @@ "SecurityPolicyAdaptiveProtectionConfig", "SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfig", "SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfigThresholdConfig", + "SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfigThresholdConfigTrafficGranularityConfig", "SecurityPolicyAdvancedOptionsConfig", "SecurityPolicyAdvancedOptionsConfigJsonCustomConfig", "SecurityPolicyDdosProtectionConfig", diff --git a/packages/google-cloud-compute/google/cloud/compute/gapic_version.py b/packages/google-cloud-compute/google/cloud/compute/gapic_version.py index c8ba2b4c6a4f..785067d93b3c 100644 --- a/packages/google-cloud-compute/google/cloud/compute/gapic_version.py +++ b/packages/google-cloud-compute/google/cloud/compute/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.20.0" # {x-release-please-version} +__version__ = "1.21.0" # {x-release-please-version} diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/__init__.py b/packages/google-cloud-compute/google/cloud/compute_v1/__init__.py index c1d442a9ad77..9f611f7657f2 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/__init__.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/__init__.py @@ -222,7 +222,6 @@ AttachNetworkEndpointsRegionNetworkEndpointGroupRequest, AuditConfig, AuditLogConfig, - AuthorizationLoggingOptions, Autoscaler, AutoscalerAggregatedList, AutoscalerList, @@ -242,6 +241,7 @@ BackendBucketCdnPolicyCacheKeyPolicy, BackendBucketCdnPolicyNegativeCachingPolicy, BackendBucketList, + BackendBucketUsedBy, BackendService, BackendServiceAggregatedList, BackendServiceCdnPolicy, @@ -250,6 +250,7 @@ BackendServiceConnectionTrackingPolicy, BackendServiceFailoverPolicy, BackendServiceGroupHealth, + BackendServiceHttpCookie, BackendServiceIAP, BackendServiceList, BackendServiceListUsable, @@ -282,8 +283,8 @@ Commitment, CommitmentAggregatedList, CommitmentList, + CommitmentResourceStatus, CommitmentsScopedList, - Condition, ConfidentialInstanceConfig, ConnectionDraining, ConsistentHashLoadBalancerSettings, @@ -295,6 +296,8 @@ CreateSnapshotRegionDiskRequest, CustomerEncryptionKey, CustomerEncryptionKeyProtectedDisk, + CustomErrorResponsePolicy, + CustomErrorResponsePolicyCustomErrorResponseRule, Data, DeleteAccessConfigInstanceRequest, DeleteAddressRequest, @@ -731,6 +734,8 @@ InstanceGroupManagerAggregatedList, InstanceGroupManagerAllInstancesConfig, InstanceGroupManagerAutoHealingPolicy, + InstanceGroupManagerInstanceFlexibilityPolicy, + InstanceGroupManagerInstanceFlexibilityPolicyInstanceSelection, InstanceGroupManagerInstanceLifecyclePolicy, InstanceGroupManagerList, InstanceGroupManagerResizeRequest, @@ -960,11 +965,6 @@ LocationPolicy, LocationPolicyLocation, LocationPolicyLocationConstraints, - LogConfig, 
- LogConfigCloudAuditOptions, - LogConfigCounterOptions, - LogConfigCounterOptionsCustomField, - LogConfigDataAccessOptions, MachineImage, MachineImageList, MachineType, @@ -974,6 +974,7 @@ ManagedInstance, ManagedInstanceInstanceHealth, ManagedInstanceLastAttempt, + ManagedInstancePropertiesFromFlexibilityPolicy, ManagedInstanceVersion, Metadata, MetadataFilter, @@ -1229,6 +1230,7 @@ ResourcePolicyWeeklyCycle, ResourcePolicyWeeklyCycleDayOfWeek, ResourceStatus, + ResourceStatusScheduling, ResumeInstanceRequest, Route, RouteAsPath, @@ -1255,12 +1257,12 @@ RouterStatusNatStatus, RouterStatusNatStatusNatRuleStatus, RouterStatusResponse, - Rule, SavedAttachedDisk, SavedDisk, ScalingScheduleStatus, Scheduling, SchedulingNodeAffinity, + SchedulingOnInstanceStopAction, ScratchDisks, Screenshot, SecurityPoliciesAggregatedList, @@ -1271,6 +1273,7 @@ SecurityPolicyAdaptiveProtectionConfig, SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfig, SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfigThresholdConfig, + SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfigThresholdConfigTrafficGranularityConfig, SecurityPolicyAdvancedOptionsConfig, SecurityPolicyAdvancedOptionsConfigJsonCustomConfig, SecurityPolicyDdosProtectionConfig, @@ -1724,7 +1727,6 @@ "AttachedDiskInitializeParams", "AuditConfig", "AuditLogConfig", - "AuthorizationLoggingOptions", "Autoscaler", "AutoscalerAggregatedList", "AutoscalerList", @@ -1744,6 +1746,7 @@ "BackendBucketCdnPolicyCacheKeyPolicy", "BackendBucketCdnPolicyNegativeCachingPolicy", "BackendBucketList", + "BackendBucketUsedBy", "BackendBucketsClient", "BackendService", "BackendServiceAggregatedList", @@ -1753,6 +1756,7 @@ "BackendServiceConnectionTrackingPolicy", "BackendServiceFailoverPolicy", "BackendServiceGroupHealth", + "BackendServiceHttpCookie", "BackendServiceIAP", "BackendServiceList", "BackendServiceListUsable", @@ -1786,8 +1790,8 @@ "Commitment", "CommitmentAggregatedList", "CommitmentList", + 
"CommitmentResourceStatus", "CommitmentsScopedList", - "Condition", "ConfidentialInstanceConfig", "ConnectionDraining", "ConsistentHashLoadBalancerSettings", @@ -1797,6 +1801,8 @@ "CreateInstancesRegionInstanceGroupManagerRequest", "CreateSnapshotDiskRequest", "CreateSnapshotRegionDiskRequest", + "CustomErrorResponsePolicy", + "CustomErrorResponsePolicyCustomErrorResponseRule", "CustomerEncryptionKey", "CustomerEncryptionKeyProtectedDisk", "Data", @@ -2250,6 +2256,8 @@ "InstanceGroupManagerAggregatedList", "InstanceGroupManagerAllInstancesConfig", "InstanceGroupManagerAutoHealingPolicy", + "InstanceGroupManagerInstanceFlexibilityPolicy", + "InstanceGroupManagerInstanceFlexibilityPolicyInstanceSelection", "InstanceGroupManagerInstanceLifecyclePolicy", "InstanceGroupManagerList", "InstanceGroupManagerResizeRequest", @@ -2492,11 +2500,6 @@ "LocationPolicy", "LocationPolicyLocation", "LocationPolicyLocationConstraints", - "LogConfig", - "LogConfigCloudAuditOptions", - "LogConfigCounterOptions", - "LogConfigCounterOptionsCustomField", - "LogConfigDataAccessOptions", "MachineImage", "MachineImageList", "MachineImagesClient", @@ -2508,6 +2511,7 @@ "ManagedInstance", "ManagedInstanceInstanceHealth", "ManagedInstanceLastAttempt", + "ManagedInstancePropertiesFromFlexibilityPolicy", "ManagedInstanceVersion", "Metadata", "MetadataFilter", @@ -2802,6 +2806,7 @@ "ResourcePolicyWeeklyCycle", "ResourcePolicyWeeklyCycleDayOfWeek", "ResourceStatus", + "ResourceStatusScheduling", "ResumeInstanceRequest", "Route", "RouteAsPath", @@ -2830,13 +2835,13 @@ "RoutersPreviewResponse", "RoutersScopedList", "RoutesClient", - "Rule", "SSLHealthCheck", "SavedAttachedDisk", "SavedDisk", "ScalingScheduleStatus", "Scheduling", "SchedulingNodeAffinity", + "SchedulingOnInstanceStopAction", "ScratchDisks", "Screenshot", "SecurityPoliciesAggregatedList", @@ -2848,6 +2853,7 @@ "SecurityPolicyAdaptiveProtectionConfig", "SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfig", 
"SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfigThresholdConfig", + "SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfigThresholdConfigTrafficGranularityConfig", "SecurityPolicyAdvancedOptionsConfig", "SecurityPolicyAdvancedOptionsConfigJsonCustomConfig", "SecurityPolicyDdosProtectionConfig", diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/gapic_version.py b/packages/google-cloud-compute/google/cloud/compute_v1/gapic_version.py index c8ba2b4c6a4f..785067d93b3c 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/gapic_version.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.20.0" # {x-release-please-version} +__version__ = "1.21.0" # {x-release-please-version} diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/accelerator_types/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/accelerator_types/client.py index e3c490fde1e6..edb5e25be0a4 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/accelerator_types/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/accelerator_types/client.py @@ -438,36 +438,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. 
- - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = AcceleratorTypesClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -477,13 +447,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or AcceleratorTypesClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/addresses/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/addresses/client.py index bd522fdd409c..ecdf130ba5ba 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/addresses/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/addresses/client.py @@ -436,36 +436,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. 
- - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = AddressesClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -475,13 +445,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or AddressesClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. 
+ return True @property def api_endpoint(self): diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/autoscalers/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/autoscalers/client.py index 654de7d186ed..d0584d2f6f64 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/autoscalers/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/autoscalers/client.py @@ -436,36 +436,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = AutoscalersClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -475,13 +445,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. 
""" - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or AutoscalersClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/backend_buckets/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/backend_buckets/client.py index 8a244f67edab..af49015555ad 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/backend_buckets/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/backend_buckets/client.py @@ -438,36 +438,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = BackendBucketsClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." 
- ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -477,13 +447,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or BackendBucketsClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/backend_services/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/backend_services/client.py index 8f6aa412de12..3768d6154768 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/backend_services/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/backend_services/client.py @@ -438,36 +438,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. 
- """ - - default_universe = BackendServicesClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -477,13 +447,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or BackendServicesClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): @@ -2400,8 +2366,8 @@ def list_usable( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListUsablePager: - r"""Retrieves an aggregated list of all usable backend - services in the specified project. + r"""Retrieves a list of all usable backend services in + the specified project. .. 
code-block:: python diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/disk_types/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/disk_types/client.py index 3d2aa700494a..2982081397d9 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/disk_types/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/disk_types/client.py @@ -433,36 +433,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = DiskTypesClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -472,13 +442,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. 
""" - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or DiskTypesClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/disks/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/disks/client.py index d8f3e5164750..e0637dd7e56e 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/disks/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/disks/client.py @@ -436,36 +436,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = DisksClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." 
- ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -475,13 +445,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or DisksClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/external_vpn_gateways/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/external_vpn_gateways/client.py index 173f2d4d5e5c..690c9b15c0ec 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/external_vpn_gateways/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/external_vpn_gateways/client.py @@ -438,36 +438,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. 
- """ - - default_universe = ExternalVpnGatewaysClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -477,13 +447,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or ExternalVpnGatewaysClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/firewall_policies/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/firewall_policies/client.py index f5c93138a301..2d349546a644 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/firewall_policies/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/firewall_policies/client.py @@ -438,36 +438,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. 
- credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = FirewallPoliciesClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -477,13 +447,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or FirewallPoliciesClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. 
+ return True @property def api_endpoint(self): diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/firewalls/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/firewalls/client.py index a25be623c57b..6f02daa4fc23 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/firewalls/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/firewalls/client.py @@ -436,36 +436,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = FirewallsClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -475,13 +445,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. 
""" - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or FirewallsClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/forwarding_rules/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/forwarding_rules/client.py index 479f226c6089..53e9a20ae3e0 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/forwarding_rules/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/forwarding_rules/client.py @@ -438,36 +438,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = ForwardingRulesClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." 
- ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -477,13 +447,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or ForwardingRulesClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/global_addresses/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/global_addresses/client.py index aa361a0639af..116ff97f44f1 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/global_addresses/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/global_addresses/client.py @@ -438,36 +438,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. 
- """ - - default_universe = GlobalAddressesClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -477,13 +447,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or GlobalAddressesClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/global_forwarding_rules/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/global_forwarding_rules/client.py index 73fac29acac7..f90f59354a39 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/global_forwarding_rules/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/global_forwarding_rules/client.py @@ -440,36 +440,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. 
- credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = GlobalForwardingRulesClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -479,13 +449,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or GlobalForwardingRulesClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. 
+ return True @property def api_endpoint(self): diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/global_network_endpoint_groups/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/global_network_endpoint_groups/client.py index 9c3968a6f6d0..dbb29bfda564 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/global_network_endpoint_groups/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/global_network_endpoint_groups/client.py @@ -442,36 +442,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = GlobalNetworkEndpointGroupsClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -481,13 +451,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. 
""" - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or GlobalNetworkEndpointGroupsClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/global_operations/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/global_operations/client.py index 582f90608011..658858c6c7d8 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/global_operations/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/global_operations/client.py @@ -435,36 +435,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = GlobalOperationsClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." 
- ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -474,13 +444,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or GlobalOperationsClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): @@ -827,7 +793,8 @@ def sample_delete(): should not be set. operation (str): Name of the Operations resource to - delete. + delete, or its unique numeric + identifier. This corresponds to the ``operation`` field on the ``request`` instance; if ``request`` is provided, this @@ -946,7 +913,8 @@ def sample_get(): should not be set. operation (str): Name of the Operations resource to - return. + return, or its unique numeric + identifier. This corresponds to the ``operation`` field on the ``request`` instance; if ``request`` is provided, this @@ -1209,7 +1177,8 @@ def sample_wait(): should not be set. operation (str): Name of the Operations resource to - return. + return, or its unique numeric + identifier. 
This corresponds to the ``operation`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/global_organization_operations/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/global_organization_operations/client.py index 917163e5dde7..cac7229cc72f 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/global_organization_operations/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/global_organization_operations/client.py @@ -439,36 +439,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = GlobalOrganizationOperationsClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. 
@@ -478,13 +448,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or GlobalOrganizationOperationsClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): @@ -715,7 +681,8 @@ def sample_delete(): the method description for details. operation (str): Name of the Operations resource to - delete. + delete, or its unique numeric + identifier. This corresponds to the ``operation`` field on the ``request`` instance; if ``request`` is provided, this @@ -825,7 +792,8 @@ def sample_get(): the method description for details. operation (str): Name of the Operations resource to - return. + return, or its unique numeric + identifier. This corresponds to the ``operation`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/global_public_delegated_prefixes/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/global_public_delegated_prefixes/client.py index 31baf252244e..f085b6f6112f 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/global_public_delegated_prefixes/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/global_public_delegated_prefixes/client.py @@ -442,36 +442,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. 
- credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = GlobalPublicDelegatedPrefixesClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -481,13 +451,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or GlobalPublicDelegatedPrefixesClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. 
+ return True @property def api_endpoint(self): diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/health_checks/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/health_checks/client.py index 390ca315950a..35a872335d1f 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/health_checks/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/health_checks/client.py @@ -436,36 +436,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = HealthChecksClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -475,13 +445,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. 
""" - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or HealthChecksClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/image_family_views/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/image_family_views/client.py index a92d0d7c5fe2..86b18ac725db 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/image_family_views/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/image_family_views/client.py @@ -434,36 +434,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = ImageFamilyViewsClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." 
- ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -473,13 +443,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or ImageFamilyViewsClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/images/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/images/client.py index 28351f58c4eb..aae696350b3b 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/images/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/images/client.py @@ -436,36 +436,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. 
- """ - - default_universe = ImagesClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -475,13 +445,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or ImagesClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_group_manager_resize_requests/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_group_manager_resize_requests/client.py index 721dce6bb8a2..24ab6efa975d 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_group_manager_resize_requests/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_group_manager_resize_requests/client.py @@ -449,36 +449,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. 
- - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = InstanceGroupManagerResizeRequestsClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -488,13 +458,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or InstanceGroupManagerResizeRequestsClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. 
+ return True @property def api_endpoint(self): diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_group_managers/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_group_managers/client.py index d65cb6c12b37..59c234a1629e 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_group_managers/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_group_managers/client.py @@ -440,36 +440,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = InstanceGroupManagersClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -479,13 +449,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. 
""" - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or InstanceGroupManagersClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_groups/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_groups/client.py index 9d4f7dea8bd7..603848969af8 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_groups/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_groups/client.py @@ -438,36 +438,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = InstanceGroupsClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." 
- ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -477,13 +447,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or InstanceGroupsClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_settings_service/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_settings_service/client.py index 6b2aa28abee9..16e2a1cf0315 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_settings_service/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_settings_service/client.py @@ -439,36 +439,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. 
- """ - - default_universe = InstanceSettingsServiceClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -478,13 +448,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or InstanceSettingsServiceClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_templates/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_templates/client.py index aab4c02a23b2..e7a398af57cf 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_templates/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_templates/client.py @@ -438,36 +438,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. 
- credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = InstanceTemplatesClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -477,13 +447,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or InstanceTemplatesClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. 
+ return True @property def api_endpoint(self): diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/instances/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/instances/client.py index 489e7e33bbec..19127f30fa54 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/instances/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/instances/client.py @@ -436,36 +436,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = InstancesClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -475,13 +445,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. 
""" - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or InstancesClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/instant_snapshots/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/instant_snapshots/client.py index 64699398706e..f1b39d2a1396 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/instant_snapshots/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/instant_snapshots/client.py @@ -438,36 +438,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = InstantSnapshotsClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." 
- ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -477,13 +447,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or InstantSnapshotsClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/interconnect_attachments/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/interconnect_attachments/client.py index 253f65fefb4b..5c532b970d23 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/interconnect_attachments/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/interconnect_attachments/client.py @@ -440,36 +440,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. 
- """ - - default_universe = InterconnectAttachmentsClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -479,13 +449,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or InterconnectAttachmentsClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/interconnect_locations/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/interconnect_locations/client.py index 4c1955436e94..772bcb8aa217 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/interconnect_locations/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/interconnect_locations/client.py @@ -437,36 +437,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. 
- credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = InterconnectLocationsClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -476,13 +446,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or InterconnectLocationsClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. 
+ return True @property def api_endpoint(self): diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/interconnect_remote_locations/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/interconnect_remote_locations/client.py index de0a24175bb3..a156fef3d9b9 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/interconnect_remote_locations/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/interconnect_remote_locations/client.py @@ -439,36 +439,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = InterconnectRemoteLocationsClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -478,13 +448,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. 
""" - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or InterconnectRemoteLocationsClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/interconnects/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/interconnects/client.py index aaceb51c97e0..27a2081e65c3 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/interconnects/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/interconnects/client.py @@ -436,36 +436,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = InterconnectsClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." 
- ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -475,13 +445,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or InterconnectsClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/license_codes/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/license_codes/client.py index 19791d56e0c1..d2ef87ed2844 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/license_codes/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/license_codes/client.py @@ -432,36 +432,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. 
- """ - - default_universe = LicenseCodesClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -471,13 +441,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or LicenseCodesClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/licenses/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/licenses/client.py index 1e391cd879eb..1e462d31cc90 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/licenses/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/licenses/client.py @@ -436,36 +436,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. 
- - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = LicensesClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -475,13 +445,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or LicensesClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/machine_images/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/machine_images/client.py index d8263f56e910..a0fa79be98ae 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/machine_images/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/machine_images/client.py @@ -436,36 +436,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. 
- - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = MachineImagesClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -475,13 +445,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or MachineImagesClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. 
+ return True @property def api_endpoint(self): diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/machine_types/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/machine_types/client.py index 76b729111007..4d9e53608a0d 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/machine_types/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/machine_types/client.py @@ -433,36 +433,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = MachineTypesClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -472,13 +442,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. 
""" - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or MachineTypesClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/network_attachments/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/network_attachments/client.py index 2e218d73ffe9..76ae30b54d43 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/network_attachments/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/network_attachments/client.py @@ -438,36 +438,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = NetworkAttachmentsClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." 
- ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -477,13 +447,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or NetworkAttachmentsClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/network_edge_security_services/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/network_edge_security_services/client.py index 491d38581d21..f936ca6897e7 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/network_edge_security_services/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/network_edge_security_services/client.py @@ -442,36 +442,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. 
- """ - - default_universe = NetworkEdgeSecurityServicesClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -481,13 +451,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or NetworkEdgeSecurityServicesClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/network_endpoint_groups/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/network_endpoint_groups/client.py index 5dd8c4475ead..ce11b9915b2c 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/network_endpoint_groups/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/network_endpoint_groups/client.py @@ -440,36 +440,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. 
- credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = NetworkEndpointGroupsClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -479,13 +449,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or NetworkEndpointGroupsClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. 
+ return True @property def api_endpoint(self): diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/network_firewall_policies/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/network_firewall_policies/client.py index 2ce5915f843f..7a6099444cfa 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/network_firewall_policies/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/network_firewall_policies/client.py @@ -440,36 +440,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = NetworkFirewallPoliciesClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -479,13 +449,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. 
""" - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or NetworkFirewallPoliciesClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/networks/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/networks/client.py index 2d6c6fdcca83..f717cc871469 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/networks/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/networks/client.py @@ -436,36 +436,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = NetworksClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." 
- ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -475,13 +445,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or NetworksClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): @@ -1935,8 +1901,7 @@ def patch_unary( metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Patches the specified network with the data included - in the request. Only the following fields can be - modified: routingConfig.routingMode. + in the request. Only routingConfig can be modified. .. code-block:: python @@ -2060,8 +2025,7 @@ def patch( metadata: Sequence[Tuple[str, str]] = (), ) -> extended_operation.ExtendedOperation: r"""Patches the specified network with the data included - in the request. Only the following fields can be - modified: routingConfig.routingMode. + in the request. Only routingConfig can be modified. .. 
code-block:: python diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/node_groups/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/node_groups/client.py index abfa417c23ea..482ea133127e 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/node_groups/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/node_groups/client.py @@ -436,36 +436,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = NodeGroupsClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -475,13 +445,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. 
""" - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or NodeGroupsClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/node_templates/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/node_templates/client.py index a37de9001145..57c083a84003 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/node_templates/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/node_templates/client.py @@ -436,36 +436,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = NodeTemplatesClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." 
- ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -475,13 +445,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or NodeTemplatesClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/node_types/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/node_types/client.py index 5d20d96caba5..e7314ae950fd 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/node_types/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/node_types/client.py @@ -433,36 +433,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. 
- """ - - default_universe = NodeTypesClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -472,13 +442,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or NodeTypesClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/packet_mirrorings/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/packet_mirrorings/client.py index 8f3aae82b89e..718b854586da 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/packet_mirrorings/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/packet_mirrorings/client.py @@ -438,36 +438,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. 
- credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = PacketMirroringsClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -477,13 +447,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or PacketMirroringsClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. 
+ return True @property def api_endpoint(self): diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/projects/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/projects/client.py index 3cd34365bdb5..68f359e65666 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/projects/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/projects/client.py @@ -436,36 +436,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = ProjectsClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -475,13 +445,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. 
""" - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or ProjectsClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/public_advertised_prefixes/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/public_advertised_prefixes/client.py index f988a2b8c93b..d48e695443a8 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/public_advertised_prefixes/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/public_advertised_prefixes/client.py @@ -440,36 +440,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = PublicAdvertisedPrefixesClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." 
- ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -479,13 +449,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or PublicAdvertisedPrefixesClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/public_delegated_prefixes/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/public_delegated_prefixes/client.py index 82989f868234..74e71da43c37 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/public_delegated_prefixes/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/public_delegated_prefixes/client.py @@ -440,36 +440,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. 
- """ - - default_universe = PublicDelegatedPrefixesClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -479,13 +449,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or PublicDelegatedPrefixesClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_autoscalers/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_autoscalers/client.py index 1fc50b748039..0ce4a252df1c 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_autoscalers/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_autoscalers/client.py @@ -438,36 +438,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. 
- credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = RegionAutoscalersClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -477,13 +447,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or RegionAutoscalersClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. 
+ return True @property def api_endpoint(self): diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_backend_services/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_backend_services/client.py index 76de8cad59dc..05b1361aced9 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_backend_services/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_backend_services/client.py @@ -440,36 +440,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = RegionBackendServicesClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -479,13 +449,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. 
""" - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or RegionBackendServicesClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): @@ -1831,8 +1797,8 @@ def list_usable( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListUsablePager: - r"""Retrieves an aggregated list of all usable backend - services in the specified project in the given region. + r"""Retrieves a list of all usable backend services in + the specified project in the given region. .. code-block:: python diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_commitments/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_commitments/client.py index 1df1123fcdb5..fbe87ac20479 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_commitments/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_commitments/client.py @@ -438,36 +438,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. 
- """ - - default_universe = RegionCommitmentsClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -477,13 +447,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or RegionCommitmentsClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_disk_types/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_disk_types/client.py index e985fbb9e895..af05f4178fd6 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_disk_types/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_disk_types/client.py @@ -435,36 +435,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. 
- credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = RegionDiskTypesClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -474,13 +444,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or RegionDiskTypesClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. 
+ return True @property def api_endpoint(self): diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_disks/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_disks/client.py index 6f97ae0f0868..68a0779f4ab3 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_disks/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_disks/client.py @@ -436,36 +436,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = RegionDisksClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -475,13 +445,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. 
""" - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or RegionDisksClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_health_check_services/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_health_check_services/client.py index a26bcfd452d9..fb3642bd35c0 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_health_check_services/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_health_check_services/client.py @@ -440,36 +440,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = RegionHealthCheckServicesClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." 
- ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -479,13 +449,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or RegionHealthCheckServicesClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_health_checks/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_health_checks/client.py index 30fea9ff210a..6a4f3b38feae 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_health_checks/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_health_checks/client.py @@ -438,36 +438,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. 
- """ - - default_universe = RegionHealthChecksClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -477,13 +447,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or RegionHealthChecksClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instance_group_managers/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instance_group_managers/client.py index ce6498488bf9..ca845e244c9e 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instance_group_managers/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instance_group_managers/client.py @@ -442,36 +442,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. 
- credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = RegionInstanceGroupManagersClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -481,13 +451,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or RegionInstanceGroupManagersClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. 
+ return True @property def api_endpoint(self): diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instance_groups/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instance_groups/client.py index aacea1576000..950e78ff3d06 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instance_groups/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instance_groups/client.py @@ -438,36 +438,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = RegionInstanceGroupsClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -477,13 +447,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. 
""" - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or RegionInstanceGroupsClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instance_templates/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instance_templates/client.py index 911bd2c6ae22..0bf07cbc064a 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instance_templates/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instance_templates/client.py @@ -440,36 +440,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = RegionInstanceTemplatesClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." 
- ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -479,13 +449,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or RegionInstanceTemplatesClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instances/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instances/client.py index b9838ead41aa..4a6f14f42ab0 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instances/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instances/client.py @@ -437,36 +437,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. 
- """ - - default_universe = RegionInstancesClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -476,13 +446,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or RegionInstancesClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instant_snapshots/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instant_snapshots/client.py index 5f2772163b6a..9e92d404c9b6 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instant_snapshots/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instant_snapshots/client.py @@ -440,36 +440,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. 
- credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = RegionInstantSnapshotsClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -479,13 +449,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or RegionInstantSnapshotsClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. 
+ return True @property def api_endpoint(self): diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_network_endpoint_groups/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_network_endpoint_groups/client.py index ae10133ac5df..1ad24a848865 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_network_endpoint_groups/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_network_endpoint_groups/client.py @@ -442,36 +442,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = RegionNetworkEndpointGroupsClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -481,13 +451,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. 
""" - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or RegionNetworkEndpointGroupsClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_network_firewall_policies/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_network_firewall_policies/client.py index 9ef32491766d..84de4e030a39 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_network_firewall_policies/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_network_firewall_policies/client.py @@ -442,36 +442,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = RegionNetworkFirewallPoliciesClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). 
" - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -481,13 +451,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or RegionNetworkFirewallPoliciesClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_notification_endpoints/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_notification_endpoints/client.py index 8f9b353ce1dd..387c17e95b14 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_notification_endpoints/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_notification_endpoints/client.py @@ -442,36 +442,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. 
- """ - - default_universe = RegionNotificationEndpointsClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -481,13 +451,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or RegionNotificationEndpointsClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_operations/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_operations/client.py index 9afa7640b4ae..65cfbb9d8166 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_operations/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_operations/client.py @@ -435,36 +435,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. 
- credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = RegionOperationsClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -474,13 +444,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or RegionOperationsClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): @@ -716,7 +682,8 @@ def sample_delete(): should not be set. operation (str): Name of the Operations resource to - delete. + delete, or its unique numeric + identifier. This corresponds to the ``operation`` field on the ``request`` instance; if ``request`` is provided, this @@ -846,7 +813,8 @@ def sample_get(): should not be set. operation (str): Name of the Operations resource to - return. + return, or its unique numeric + identifier. This corresponds to the ``operation`` field on the ``request`` instance; if ``request`` is provided, this @@ -1133,7 +1101,8 @@ def sample_wait(): should not be set. 
operation (str): Name of the Operations resource to - return. + return, or its unique numeric + identifier. This corresponds to the ``operation`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_security_policies/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_security_policies/client.py index 64cd68517767..a8e94a8e5768 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_security_policies/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_security_policies/client.py @@ -440,36 +440,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = RegionSecurityPoliciesClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. 
@@ -479,13 +449,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or RegionSecurityPoliciesClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_ssl_certificates/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_ssl_certificates/client.py index 601228014ab6..d22361bc8215 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_ssl_certificates/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_ssl_certificates/client.py @@ -440,36 +440,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. 
- """ - - default_universe = RegionSslCertificatesClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -479,13 +449,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or RegionSslCertificatesClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_ssl_policies/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_ssl_policies/client.py index 3dad39ce1e01..b5130166d07b 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_ssl_policies/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_ssl_policies/client.py @@ -438,36 +438,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. 
- credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = RegionSslPoliciesClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -477,13 +447,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or RegionSslPoliciesClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. 
+ return True @property def api_endpoint(self): diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_target_http_proxies/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_target_http_proxies/client.py index 4fa02950c9ee..758f93df3c10 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_target_http_proxies/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_target_http_proxies/client.py @@ -440,36 +440,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = RegionTargetHttpProxiesClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -479,13 +449,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. 
""" - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or RegionTargetHttpProxiesClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_target_https_proxies/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_target_https_proxies/client.py index a0a67bf5464d..a4963f9723b4 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_target_https_proxies/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_target_https_proxies/client.py @@ -440,36 +440,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = RegionTargetHttpsProxiesClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). 
" - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -479,13 +449,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or RegionTargetHttpsProxiesClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_target_tcp_proxies/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_target_tcp_proxies/client.py index 401ea84bb7f3..62f5bbb2439c 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_target_tcp_proxies/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_target_tcp_proxies/client.py @@ -440,36 +440,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. 
- """ - - default_universe = RegionTargetTcpProxiesClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -479,13 +449,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or RegionTargetTcpProxiesClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_url_maps/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_url_maps/client.py index af510a30bb48..c48097adc175 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_url_maps/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_url_maps/client.py @@ -436,36 +436,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. 
- credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = RegionUrlMapsClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -475,13 +445,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or RegionUrlMapsClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): @@ -1033,9 +999,9 @@ def sample_get(): resource defines mappings from hostnames and URL paths to either a backend service or a backend bucket. To use the global urlMaps resource, the backend service must - have a loadBalancingScheme of either EXTERNAL or - INTERNAL_SELF_MANAGED. To use the regionUrlMaps - resource, the backend service must have a + have a loadBalancingScheme of either EXTERNAL, + EXTERNAL_MANAGED, or INTERNAL_SELF_MANAGED. To use the + regionUrlMaps resource, the backend service must have a loadBalancingScheme of INTERNAL_MANAGED. For more information, read URL Map Concepts. 
diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_url_maps/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_url_maps/transports/rest.py index 80b7b2ddb93f..88391751ac3f 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_url_maps/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_url_maps/transports/rest.py @@ -540,9 +540,9 @@ def __call__( resource defines mappings from hostnames and URL paths to either a backend service or a backend bucket. To use the global urlMaps resource, the backend service must - have a loadBalancingScheme of either EXTERNAL or - INTERNAL_SELF_MANAGED. To use the regionUrlMaps - resource, the backend service must have a + have a loadBalancingScheme of either EXTERNAL, + EXTERNAL_MANAGED, or INTERNAL_SELF_MANAGED. To use the + regionUrlMaps resource, the backend service must have a loadBalancingScheme of INTERNAL_MANAGED. For more information, read URL Map Concepts. diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_zones/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_zones/client.py index d011854631b2..b7b7de589541 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_zones/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_zones/client.py @@ -433,36 +433,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. 
- - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = RegionZonesClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -472,13 +442,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or RegionZonesClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/regions/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/regions/client.py index 9502b764063d..51c4d28dd76c 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/regions/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/regions/client.py @@ -433,36 +433,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. 
- - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = RegionsClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -472,13 +442,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or RegionsClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): @@ -667,7 +633,15 @@ def get( request's ``fields`` query parameter to only include the fields you need. For example, to only include the ``id`` and ``selfLink`` fields, add the query parameter - ``?fields=id,selfLink`` to your request. + ``?fields=id,selfLink`` to your request. This method fails if + the quota information is unavailable for the region and if the + organization policy constraint + compute.requireBasicQuotaInResponse is enforced. 
This + constraint, when enforced, disables the fail-open behaviour when + quota information (the ``items.quotas`` field) is unavailable + for the region. It is recommended to use the default setting for + the constraint unless your application requires the fail-closed + behaviour for this method. .. code-block:: python @@ -793,7 +767,15 @@ def list( exclude one or more fields, set your request's ``fields`` query parameter to only include the fields you need. For example, to only include the ``id`` and ``selfLink`` fields, add the query - parameter ``?fields=id,selfLink`` to your request. + parameter ``?fields=id,selfLink`` to your request. This method + fails if the quota information is unavailable for the region and + if the organization policy constraint + compute.requireBasicQuotaInResponse is enforced. This + constraint, when enforced, disables the fail-open behaviour when + quota information (the ``items.quotas`` field) is unavailable + for the region. It is recommended to use the default setting for + the constraint unless your application requires the fail-closed + behaviour for this method. .. code-block:: python diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/reservations/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/reservations/client.py index 712408dd11c0..a9353f050c70 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/reservations/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/reservations/client.py @@ -436,36 +436,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. 
- credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = ReservationsClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -475,13 +445,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or ReservationsClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. 
+ return True @property def api_endpoint(self): diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/resource_policies/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/resource_policies/client.py index 15544bdf9cbd..29a86eb90826 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/resource_policies/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/resource_policies/client.py @@ -438,36 +438,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = ResourcePoliciesClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -477,13 +447,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. 
""" - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or ResourcePoliciesClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/routers/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/routers/client.py index 16244812f736..434841575504 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/routers/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/routers/client.py @@ -436,36 +436,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = RoutersClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." 
- ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -475,13 +445,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or RoutersClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/routes/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/routes/client.py index 4a180fbf1b7f..5602426546eb 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/routes/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/routes/client.py @@ -436,36 +436,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. 
- """ - - default_universe = RoutesClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -475,13 +445,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or RoutesClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/security_policies/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/security_policies/client.py index 9866a67f1ee7..1a489a6fa42f 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/security_policies/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/security_policies/client.py @@ -438,36 +438,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. 
- credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = SecurityPoliciesClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -477,13 +447,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or SecurityPoliciesClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. 
+ return True @property def api_endpoint(self): diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/service_attachments/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/service_attachments/client.py index ed32f20eb629..473c5260eca9 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/service_attachments/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/service_attachments/client.py @@ -438,36 +438,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = ServiceAttachmentsClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -477,13 +447,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. 
""" - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or ServiceAttachmentsClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/snapshot_settings_service/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/snapshot_settings_service/client.py index 96038398783e..db5865f609cc 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/snapshot_settings_service/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/snapshot_settings_service/client.py @@ -439,36 +439,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = SnapshotSettingsServiceClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." 
- ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -478,13 +448,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or SnapshotSettingsServiceClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/snapshots/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/snapshots/client.py index 48ac2b521d06..ab2732030918 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/snapshots/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/snapshots/client.py @@ -436,36 +436,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. 
- """ - - default_universe = SnapshotsClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -475,13 +445,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or SnapshotsClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/ssl_certificates/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/ssl_certificates/client.py index 9f8d79a499c5..cce8256d7fc3 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/ssl_certificates/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/ssl_certificates/client.py @@ -438,36 +438,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. 
- credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = SslCertificatesClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -477,13 +447,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or SslCertificatesClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. 
+ return True @property def api_endpoint(self): diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/ssl_policies/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/ssl_policies/client.py index 5a0b7f483a54..1518200ad0b7 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/ssl_policies/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/ssl_policies/client.py @@ -436,36 +436,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = SslPoliciesClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -475,13 +445,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. 
""" - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or SslPoliciesClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/storage_pool_types/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/storage_pool_types/client.py index 66c919a57704..747e58f88d07 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/storage_pool_types/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/storage_pool_types/client.py @@ -435,36 +435,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = StoragePoolTypesClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." 
- ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -474,13 +444,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or StoragePoolTypesClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/storage_pools/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/storage_pools/client.py index e70dc879e944..167bcab4d089 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/storage_pools/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/storage_pools/client.py @@ -436,36 +436,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. 
- """ - - default_universe = StoragePoolsClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -475,13 +445,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or StoragePoolsClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): @@ -2219,7 +2185,8 @@ def update_unary( r"""Updates the specified storagePool with the data included in the request. The update is performed only on selected fields included as part of update-mask. Only the following fields can - be modified: size_tb and provisioned_iops. + be modified: pool_provisioned_capacity_gb, pool_provisioned_iops + and pool_provisioned_throughput. .. code-block:: python @@ -2360,7 +2327,8 @@ def update( r"""Updates the specified storagePool with the data included in the request. The update is performed only on selected fields included as part of update-mask. Only the following fields can - be modified: size_tb and provisioned_iops. + be modified: pool_provisioned_capacity_gb, pool_provisioned_iops + and pool_provisioned_throughput. .. 
code-block:: python diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/subnetworks/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/subnetworks/client.py index 2ffc259804cf..be2fd4ed7237 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/subnetworks/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/subnetworks/client.py @@ -436,36 +436,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = SubnetworksClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -475,13 +445,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. 
""" - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or SubnetworksClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_grpc_proxies/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_grpc_proxies/client.py index 021e2c228f58..93bbd3659722 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_grpc_proxies/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_grpc_proxies/client.py @@ -438,36 +438,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = TargetGrpcProxiesClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." 
- ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -477,13 +447,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or TargetGrpcProxiesClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_http_proxies/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_http_proxies/client.py index 524ef81a25da..fa82cb8844cf 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_http_proxies/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_http_proxies/client.py @@ -438,36 +438,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. 
- """ - - default_universe = TargetHttpProxiesClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -477,13 +447,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or TargetHttpProxiesClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_https_proxies/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_https_proxies/client.py index 5fc74cea1db9..aa470a359be4 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_https_proxies/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_https_proxies/client.py @@ -438,36 +438,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. 
- credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = TargetHttpsProxiesClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -477,13 +447,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or TargetHttpsProxiesClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. 
+ return True @property def api_endpoint(self): diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_instances/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_instances/client.py index 2575ea47aa79..35b3296d45d6 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_instances/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_instances/client.py @@ -438,36 +438,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = TargetInstancesClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -477,13 +447,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. 
""" - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or TargetInstancesClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_pools/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_pools/client.py index b575ba53b5cf..3be7e5490ca8 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_pools/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_pools/client.py @@ -436,36 +436,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = TargetPoolsClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." 
- ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -475,13 +445,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or TargetPoolsClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_ssl_proxies/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_ssl_proxies/client.py index ea783bb9a2fa..45690907b3b1 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_ssl_proxies/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_ssl_proxies/client.py @@ -438,36 +438,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. 
- """ - - default_universe = TargetSslProxiesClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -477,13 +447,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or TargetSslProxiesClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_tcp_proxies/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_tcp_proxies/client.py index ed1563d2f327..a7af51bee5ff 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_tcp_proxies/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_tcp_proxies/client.py @@ -438,36 +438,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. 
- credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = TargetTcpProxiesClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -477,13 +447,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or TargetTcpProxiesClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. 
+ return True @property def api_endpoint(self): diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_vpn_gateways/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_vpn_gateways/client.py index a9c6cb8df839..2f1ac9316952 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_vpn_gateways/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_vpn_gateways/client.py @@ -438,36 +438,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = TargetVpnGatewaysClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -477,13 +447,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. 
""" - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or TargetVpnGatewaysClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/url_maps/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/url_maps/client.py index 9692ef9e40dd..8d3417f41f9e 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/url_maps/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/url_maps/client.py @@ -436,36 +436,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = UrlMapsClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." 
- ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -475,13 +445,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or UrlMapsClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): @@ -1113,9 +1079,9 @@ def sample_get(): resource defines mappings from hostnames and URL paths to either a backend service or a backend bucket. To use the global urlMaps resource, the backend service must - have a loadBalancingScheme of either EXTERNAL or - INTERNAL_SELF_MANAGED. To use the regionUrlMaps - resource, the backend service must have a + have a loadBalancingScheme of either EXTERNAL, + EXTERNAL_MANAGED, or INTERNAL_SELF_MANAGED. To use the + regionUrlMaps resource, the backend service must have a loadBalancingScheme of INTERNAL_MANAGED. For more information, read URL Map Concepts. diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/url_maps/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/url_maps/transports/rest.py index ba84ea21b49a..ce9647dd6788 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/url_maps/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/url_maps/transports/rest.py @@ -676,9 +676,9 @@ def __call__( resource defines mappings from hostnames and URL paths to either a backend service or a backend bucket. To use the global urlMaps resource, the backend service must - have a loadBalancingScheme of either EXTERNAL or - INTERNAL_SELF_MANAGED. 
To use the regionUrlMaps - resource, the backend service must have a + have a loadBalancingScheme of either EXTERNAL, + EXTERNAL_MANAGED, or INTERNAL_SELF_MANAGED. To use the + regionUrlMaps resource, the backend service must have a loadBalancingScheme of INTERNAL_MANAGED. For more information, read URL Map Concepts. diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/vpn_gateways/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/vpn_gateways/client.py index a3db517a80c0..cfcae3e45681 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/vpn_gateways/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/vpn_gateways/client.py @@ -436,36 +436,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = VpnGatewaysClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." 
- ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -475,13 +445,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or VpnGatewaysClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/vpn_tunnels/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/vpn_tunnels/client.py index 86a982810b11..232f0698d33a 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/vpn_tunnels/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/vpn_tunnels/client.py @@ -436,36 +436,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. 
- """ - - default_universe = VpnTunnelsClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -475,13 +445,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or VpnTunnelsClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/zone_operations/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/zone_operations/client.py index 8d59625da496..2269e94c2de7 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/zone_operations/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/zone_operations/client.py @@ -435,36 +435,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. 
- credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = ZoneOperationsClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -474,13 +444,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or ZoneOperationsClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): @@ -713,7 +679,8 @@ def sample_delete(): should not be set. operation (str): Name of the Operations resource to - delete. + delete, or its unique numeric + identifier. This corresponds to the ``operation`` field on the ``request`` instance; if ``request`` is provided, this @@ -843,7 +810,8 @@ def sample_get(): should not be set. operation (str): Name of the Operations resource to - return. + return, or its unique numeric + identifier. This corresponds to the ``operation`` field on the ``request`` instance; if ``request`` is provided, this @@ -1129,7 +1097,8 @@ def sample_wait(): should not be set. 
operation (str): Name of the Operations resource to - return. + return, or its unique numeric + identifier. This corresponds to the ``operation`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/zones/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/zones/client.py index 6d712df2de77..19cd5b5b32ab 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/zones/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/zones/client.py @@ -433,36 +433,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = ZonesClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. 
@@ -472,13 +442,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or ZonesClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): @@ -713,7 +679,7 @@ def sample_get(): Represents a Zone resource. A zone is a deployment area. These deployment areas are subsets of a region. For - example the zone us-east1-a is located + example the zone us-east1-b is located in the us-east1 region. For more information, read Regions and Zones. diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/zones/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/zones/transports/rest.py index e91db7d5fefe..34e7120035a7 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/zones/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/zones/transports/rest.py @@ -262,7 +262,7 @@ def __call__( Represents a Zone resource. A zone is a deployment area. These deployment areas are subsets of a region. For - example the zone us-east1-a is located + example the zone us-east1-b is located in the us-east1 region. For more information, read Regions and Zones. 
diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/types/__init__.py b/packages/google-cloud-compute/google/cloud/compute_v1/types/__init__.py index b5e1ad81fe09..10aa25c16145 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/types/__init__.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/types/__init__.py @@ -113,7 +113,6 @@ AttachNetworkEndpointsRegionNetworkEndpointGroupRequest, AuditConfig, AuditLogConfig, - AuthorizationLoggingOptions, Autoscaler, AutoscalerAggregatedList, AutoscalerList, @@ -133,6 +132,7 @@ BackendBucketCdnPolicyCacheKeyPolicy, BackendBucketCdnPolicyNegativeCachingPolicy, BackendBucketList, + BackendBucketUsedBy, BackendService, BackendServiceAggregatedList, BackendServiceCdnPolicy, @@ -141,6 +141,7 @@ BackendServiceConnectionTrackingPolicy, BackendServiceFailoverPolicy, BackendServiceGroupHealth, + BackendServiceHttpCookie, BackendServiceIAP, BackendServiceList, BackendServiceListUsable, @@ -173,8 +174,8 @@ Commitment, CommitmentAggregatedList, CommitmentList, + CommitmentResourceStatus, CommitmentsScopedList, - Condition, ConfidentialInstanceConfig, ConnectionDraining, ConsistentHashLoadBalancerSettings, @@ -186,6 +187,8 @@ CreateSnapshotRegionDiskRequest, CustomerEncryptionKey, CustomerEncryptionKeyProtectedDisk, + CustomErrorResponsePolicy, + CustomErrorResponsePolicyCustomErrorResponseRule, Data, DeleteAccessConfigInstanceRequest, DeleteAddressRequest, @@ -622,6 +625,8 @@ InstanceGroupManagerAggregatedList, InstanceGroupManagerAllInstancesConfig, InstanceGroupManagerAutoHealingPolicy, + InstanceGroupManagerInstanceFlexibilityPolicy, + InstanceGroupManagerInstanceFlexibilityPolicyInstanceSelection, InstanceGroupManagerInstanceLifecyclePolicy, InstanceGroupManagerList, InstanceGroupManagerResizeRequest, @@ -851,11 +856,6 @@ LocationPolicy, LocationPolicyLocation, LocationPolicyLocationConstraints, - LogConfig, - LogConfigCloudAuditOptions, - LogConfigCounterOptions, - 
LogConfigCounterOptionsCustomField, - LogConfigDataAccessOptions, MachineImage, MachineImageList, MachineType, @@ -865,6 +865,7 @@ ManagedInstance, ManagedInstanceInstanceHealth, ManagedInstanceLastAttempt, + ManagedInstancePropertiesFromFlexibilityPolicy, ManagedInstanceVersion, Metadata, MetadataFilter, @@ -1120,6 +1121,7 @@ ResourcePolicyWeeklyCycle, ResourcePolicyWeeklyCycleDayOfWeek, ResourceStatus, + ResourceStatusScheduling, ResumeInstanceRequest, Route, RouteAsPath, @@ -1146,12 +1148,12 @@ RouterStatusNatStatus, RouterStatusNatStatusNatRuleStatus, RouterStatusResponse, - Rule, SavedAttachedDisk, SavedDisk, ScalingScheduleStatus, Scheduling, SchedulingNodeAffinity, + SchedulingOnInstanceStopAction, ScratchDisks, Screenshot, SecurityPoliciesAggregatedList, @@ -1162,6 +1164,7 @@ SecurityPolicyAdaptiveProtectionConfig, SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfig, SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfigThresholdConfig, + SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfigThresholdConfigTrafficGranularityConfig, SecurityPolicyAdvancedOptionsConfig, SecurityPolicyAdvancedOptionsConfigJsonCustomConfig, SecurityPolicyDdosProtectionConfig, @@ -1612,7 +1615,6 @@ "AttachNetworkEndpointsRegionNetworkEndpointGroupRequest", "AuditConfig", "AuditLogConfig", - "AuthorizationLoggingOptions", "Autoscaler", "AutoscalerAggregatedList", "AutoscalerList", @@ -1632,6 +1634,7 @@ "BackendBucketCdnPolicyCacheKeyPolicy", "BackendBucketCdnPolicyNegativeCachingPolicy", "BackendBucketList", + "BackendBucketUsedBy", "BackendService", "BackendServiceAggregatedList", "BackendServiceCdnPolicy", @@ -1640,6 +1643,7 @@ "BackendServiceConnectionTrackingPolicy", "BackendServiceFailoverPolicy", "BackendServiceGroupHealth", + "BackendServiceHttpCookie", "BackendServiceIAP", "BackendServiceList", "BackendServiceListUsable", @@ -1672,8 +1676,8 @@ "Commitment", "CommitmentAggregatedList", "CommitmentList", + "CommitmentResourceStatus", 
"CommitmentsScopedList", - "Condition", "ConfidentialInstanceConfig", "ConnectionDraining", "ConsistentHashLoadBalancerSettings", @@ -1685,6 +1689,8 @@ "CreateSnapshotRegionDiskRequest", "CustomerEncryptionKey", "CustomerEncryptionKeyProtectedDisk", + "CustomErrorResponsePolicy", + "CustomErrorResponsePolicyCustomErrorResponseRule", "Data", "DeleteAccessConfigInstanceRequest", "DeleteAddressRequest", @@ -2121,6 +2127,8 @@ "InstanceGroupManagerAggregatedList", "InstanceGroupManagerAllInstancesConfig", "InstanceGroupManagerAutoHealingPolicy", + "InstanceGroupManagerInstanceFlexibilityPolicy", + "InstanceGroupManagerInstanceFlexibilityPolicyInstanceSelection", "InstanceGroupManagerInstanceLifecyclePolicy", "InstanceGroupManagerList", "InstanceGroupManagerResizeRequest", @@ -2350,11 +2358,6 @@ "LocationPolicy", "LocationPolicyLocation", "LocationPolicyLocationConstraints", - "LogConfig", - "LogConfigCloudAuditOptions", - "LogConfigCounterOptions", - "LogConfigCounterOptionsCustomField", - "LogConfigDataAccessOptions", "MachineImage", "MachineImageList", "MachineType", @@ -2364,6 +2367,7 @@ "ManagedInstance", "ManagedInstanceInstanceHealth", "ManagedInstanceLastAttempt", + "ManagedInstancePropertiesFromFlexibilityPolicy", "ManagedInstanceVersion", "Metadata", "MetadataFilter", @@ -2619,6 +2623,7 @@ "ResourcePolicyWeeklyCycle", "ResourcePolicyWeeklyCycleDayOfWeek", "ResourceStatus", + "ResourceStatusScheduling", "ResumeInstanceRequest", "Route", "RouteAsPath", @@ -2645,12 +2650,12 @@ "RouterStatusNatStatus", "RouterStatusNatStatusNatRuleStatus", "RouterStatusResponse", - "Rule", "SavedAttachedDisk", "SavedDisk", "ScalingScheduleStatus", "Scheduling", "SchedulingNodeAffinity", + "SchedulingOnInstanceStopAction", "ScratchDisks", "Screenshot", "SecurityPoliciesAggregatedList", @@ -2661,6 +2666,7 @@ "SecurityPolicyAdaptiveProtectionConfig", "SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfig", 
"SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfigThresholdConfig", + "SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfigThresholdConfigTrafficGranularityConfig", "SecurityPolicyAdvancedOptionsConfig", "SecurityPolicyAdvancedOptionsConfigJsonCustomConfig", "SecurityPolicyDdosProtectionConfig", diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/types/compute.py b/packages/google-cloud-compute/google/cloud/compute_v1/types/compute.py index 89b4d61be105..0eca84477cb9 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/types/compute.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/types/compute.py @@ -123,7 +123,6 @@ "AttachedDiskInitializeParams", "AuditConfig", "AuditLogConfig", - "AuthorizationLoggingOptions", "Autoscaler", "AutoscalerAggregatedList", "AutoscalerList", @@ -142,6 +141,7 @@ "BackendBucketCdnPolicyCacheKeyPolicy", "BackendBucketCdnPolicyNegativeCachingPolicy", "BackendBucketList", + "BackendBucketUsedBy", "BackendService", "BackendServiceAggregatedList", "BackendServiceCdnPolicy", @@ -150,6 +150,7 @@ "BackendServiceConnectionTrackingPolicy", "BackendServiceFailoverPolicy", "BackendServiceGroupHealth", + "BackendServiceHttpCookie", "BackendServiceIAP", "BackendServiceList", "BackendServiceListUsable", @@ -182,8 +183,8 @@ "Commitment", "CommitmentAggregatedList", "CommitmentList", + "CommitmentResourceStatus", "CommitmentsScopedList", - "Condition", "ConfidentialInstanceConfig", "ConnectionDraining", "ConsistentHashLoadBalancerSettings", @@ -193,6 +194,8 @@ "CreateInstancesRegionInstanceGroupManagerRequest", "CreateSnapshotDiskRequest", "CreateSnapshotRegionDiskRequest", + "CustomErrorResponsePolicy", + "CustomErrorResponsePolicyCustomErrorResponseRule", "CustomerEncryptionKey", "CustomerEncryptionKeyProtectedDisk", "Data", @@ -631,6 +634,8 @@ "InstanceGroupManagerAggregatedList", "InstanceGroupManagerAllInstancesConfig", "InstanceGroupManagerAutoHealingPolicy", + 
"InstanceGroupManagerInstanceFlexibilityPolicy", + "InstanceGroupManagerInstanceFlexibilityPolicyInstanceSelection", "InstanceGroupManagerInstanceLifecyclePolicy", "InstanceGroupManagerList", "InstanceGroupManagerResizeRequest", @@ -860,11 +865,6 @@ "LocationPolicy", "LocationPolicyLocation", "LocationPolicyLocationConstraints", - "LogConfig", - "LogConfigCloudAuditOptions", - "LogConfigCounterOptions", - "LogConfigCounterOptionsCustomField", - "LogConfigDataAccessOptions", "MachineImage", "MachineImageList", "MachineType", @@ -874,6 +874,7 @@ "ManagedInstance", "ManagedInstanceInstanceHealth", "ManagedInstanceLastAttempt", + "ManagedInstancePropertiesFromFlexibilityPolicy", "ManagedInstanceVersion", "Metadata", "MetadataFilter", @@ -1129,6 +1130,7 @@ "ResourcePolicyWeeklyCycle", "ResourcePolicyWeeklyCycleDayOfWeek", "ResourceStatus", + "ResourceStatusScheduling", "ResumeInstanceRequest", "Route", "RouteAsPath", @@ -1155,13 +1157,13 @@ "RouterStatusResponse", "RoutersPreviewResponse", "RoutersScopedList", - "Rule", "SSLHealthCheck", "SavedAttachedDisk", "SavedDisk", "ScalingScheduleStatus", "Scheduling", "SchedulingNodeAffinity", + "SchedulingOnInstanceStopAction", "ScratchDisks", "Screenshot", "SecurityPoliciesAggregatedList", @@ -1172,6 +1174,7 @@ "SecurityPolicyAdaptiveProtectionConfig", "SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfig", "SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfigThresholdConfig", + "SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfigThresholdConfigTrafficGranularityConfig", "SecurityPolicyAdvancedOptionsConfig", "SecurityPolicyAdvancedOptionsConfigJsonCustomConfig", "SecurityPolicyDdosProtectionConfig", @@ -4086,6 +4089,12 @@ class AdvancedMachineFeatures(proto.Message): instance creation. This field is a member of `oneof`_ ``_enable_uefi_networking``. + performance_monitoring_unit (str): + Type of Performance Monitoring Unit requested + on instance. 
Check the PerformanceMonitoringUnit + enum for the list of possible values. + + This field is a member of `oneof`_ ``_performance_monitoring_unit``. threads_per_core (int): The number of threads per physical core. To disable simultaneous multithreading (SMT) set @@ -4094,6 +4103,13 @@ class AdvancedMachineFeatures(proto.Message): processor is assumed. This field is a member of `oneof`_ ``_threads_per_core``. + turbo_mode (str): + Turbo frequency mode to use for the instance. Supported + modes include: \* ALL_CORE_MAX Using empty string or not + setting this field will use the platform-specific default + turbo mode. + + This field is a member of `oneof`_ ``_turbo_mode``. visible_core_count (int): The number of physical cores to expose to an instance. Multiply by the number of threads per @@ -4106,6 +4122,28 @@ class AdvancedMachineFeatures(proto.Message): This field is a member of `oneof`_ ``_visible_core_count``. """ + class PerformanceMonitoringUnit(proto.Enum): + r"""Type of Performance Monitoring Unit requested on instance. + + Values: + UNDEFINED_PERFORMANCE_MONITORING_UNIT (0): + A value indicating that the enum field is not + set. + ARCHITECTURAL (155113693): + Architecturally defined non-LLC events. + ENHANCED (476873590): + Most documented core/L2 and LLC events. + PERFORMANCE_MONITORING_UNIT_UNSPECIFIED (429210692): + No description available. + STANDARD (484642493): + Most documented core/L2 events. 
+ """ + UNDEFINED_PERFORMANCE_MONITORING_UNIT = 0 + ARCHITECTURAL = 155113693 + ENHANCED = 476873590 + PERFORMANCE_MONITORING_UNIT_UNSPECIFIED = 429210692 + STANDARD = 484642493 + enable_nested_virtualization: bool = proto.Field( proto.BOOL, number=16639365, @@ -4116,11 +4154,21 @@ class AdvancedMachineFeatures(proto.Message): number=334485668, optional=True, ) + performance_monitoring_unit: str = proto.Field( + proto.STRING, + number=533618956, + optional=True, + ) threads_per_core: int = proto.Field( proto.INT32, number=352611671, optional=True, ) + turbo_mode: str = proto.Field( + proto.STRING, + number=432965412, + optional=True, + ) visible_core_count: int = proto.Field( proto.INT32, number=193198684, @@ -10983,16 +11031,22 @@ class VmFamily(proto.Enum): UNDEFINED_VM_FAMILY (0): A value indicating that the enum field is not set. + VM_FAMILY_CLOUD_TPU_DEVICE_CT3 (42845948): + No description available. VM_FAMILY_CLOUD_TPU_LITE_DEVICE_CT5L (108020067): No description available. VM_FAMILY_CLOUD_TPU_LITE_POD_SLICE_CT5LP (18705267): No description available. + VM_FAMILY_CLOUD_TPU_POD_SLICE_CT3P (517384376): + No description available. VM_FAMILY_CLOUD_TPU_POD_SLICE_CT4P (517384407): No description available. """ UNDEFINED_VM_FAMILY = 0 + VM_FAMILY_CLOUD_TPU_DEVICE_CT3 = 42845948 VM_FAMILY_CLOUD_TPU_LITE_DEVICE_CT5L = 108020067 VM_FAMILY_CLOUD_TPU_LITE_POD_SLICE_CT5LP = 18705267 + VM_FAMILY_CLOUD_TPU_POD_SLICE_CT3P = 517384376 VM_FAMILY_CLOUD_TPU_POD_SLICE_CT4P = 517384407 class WorkloadType(proto.Enum): @@ -11849,10 +11903,13 @@ class AttachedDisk(proto.Message): instance. If you do not provide an encryption key, then the disk will be encrypted using an automatically generated key and you do not need - to provide a key to use the disk later. Instance - templates do not store customer-supplied - encryption keys, so you cannot use your own keys - to encrypt disks in a managed instance group. + to provide a key to use the disk later. 
Note: + Instance templates do not store + customer-supplied encryption keys, so you cannot + use your own keys to encrypt disks in a managed + instance group. You cannot create VMs that have + disks with customer-supplied keys using the bulk + insert method. This field is a member of `oneof`_ ``_disk_encryption_key``. disk_size_gb (int): @@ -11927,13 +11984,13 @@ class AttachedDisk(proto.Message): source (str): Specifies a valid partial or full URL to an existing Persistent Disk resource. When creating - a new instance, one of + a new instance boot disk, one of initializeParams.sourceImage or initializeParams.sourceSnapshot or disks.source - is required except for local SSD. If desired, - you can also attach existing non-root persistent - disks using this property. This field is only - applicable for persistent disks. Note that for + is required. If desired, you can also attach + existing non-root persistent disks using this + property. This field is only applicable for + persistent disks. Note that for InstanceTemplate, specify the disk name for zonal disk, and the URL for regional disk. @@ -12257,14 +12314,14 @@ class AttachedDiskInitializeParams(proto.Message): specify only the resource policy name. source_image (str): The source image to create this disk. When - creating a new instance, one of + creating a new instance boot disk, one of initializeParams.sourceImage or initializeParams.sourceSnapshot or disks.source - is required except for local SSD. To create a - disk with one of the public operating system - images, specify the image by its family name. - For example, specify family/debian-9 to use the - latest Debian 9 image: + is required. To create a disk with one of the + public operating system images, specify the + image by its family name. 
For example, specify + family/debian-9 to use the latest Debian 9 + image: projects/debian-cloud/global/images/family/debian-9 Alternatively, use a specific version of a public operating system image: @@ -12294,15 +12351,14 @@ class AttachedDiskInitializeParams(proto.Message): This field is a member of `oneof`_ ``_source_image_encryption_key``. source_snapshot (str): The source snapshot to create this disk. When - creating a new instance, one of + creating a new instance boot disk, one of initializeParams.sourceSnapshot or initializeParams.sourceImage or disks.source is - required except for local SSD. To create a disk - with a snapshot that you created, specify the - snapshot name in the following format: - global/snapshots/my-backup If the source - snapshot is deleted later, this field will not - be set. + required. To create a disk with a snapshot that + you created, specify the snapshot name in the + following format: global/snapshots/my-backup If + the source snapshot is deleted later, this field + will not be set. This field is a member of `oneof`_ ``_source_snapshot``. source_snapshot_encryption_key (google.cloud.compute_v1.types.CustomerEncryptionKey): @@ -12480,8 +12536,8 @@ class AuditConfig(proto.Message): "DATA_READ" }, { "log_type": "DATA_WRITE", "exempted_members": [ "user:aliya@example.com" ] } ] } ] } For sampleservice, this policy enables DATA_READ, DATA_WRITE and ADMIN_READ logging. It also - exempts jose@example.com from DATA_READ logging, and - aliya@example.com from DATA_WRITE logging. + exempts ``jose@example.com`` from DATA_READ logging, and + ``aliya@example.com`` from DATA_WRITE logging. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -12491,8 +12547,7 @@ class AuditConfig(proto.Message): The configuration for logging of each type of permission. exempted_members (MutableSequence[str]): - This is deprecated and has no effect. Do not - use. 
+ service (str): Specifies a service that will be enabled for audit logging. For example, ``storage.googleapis.com``, @@ -12534,8 +12589,6 @@ class AuditLogConfig(proto.Message): logging for this type of permission. Follows the same format of Binding.members. ignore_child_exemptions (bool): - This is deprecated and has no effect. Do not - use. This field is a member of `oneof`_ ``_ignore_child_exemptions``. log_type (str): @@ -12584,57 +12637,6 @@ class LogType(proto.Enum): ) -class AuthorizationLoggingOptions(proto.Message): - r"""This is deprecated and has no effect. Do not use. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - permission_type (str): - This is deprecated and has no effect. Do not - use. Check the PermissionType enum for the list - of possible values. - - This field is a member of `oneof`_ ``_permission_type``. - """ - - class PermissionType(proto.Enum): - r"""This is deprecated and has no effect. Do not use. - - Values: - UNDEFINED_PERMISSION_TYPE (0): - A value indicating that the enum field is not - set. - ADMIN_READ (128951462): - This is deprecated and has no effect. Do not - use. - ADMIN_WRITE (244412079): - This is deprecated and has no effect. Do not - use. - DATA_READ (305224971): - This is deprecated and has no effect. Do not - use. - DATA_WRITE (340181738): - This is deprecated and has no effect. Do not - use. - PERMISSION_TYPE_UNSPECIFIED (440313346): - This is deprecated and has no effect. Do not - use. - """ - UNDEFINED_PERMISSION_TYPE = 0 - ADMIN_READ = 128951462 - ADMIN_WRITE = 244412079 - DATA_READ = 305224971 - DATA_WRITE = 340181738 - PERMISSION_TYPE_UNSPECIFIED = 440313346 - - permission_type: str = proto.Field( - proto.STRING, - number=525978538, - optional=True, - ) - - class Autoscaler(proto.Message): r"""Represents an Autoscaler resource. 
Google Compute Engine has two Autoscaler resources: \* @@ -14097,6 +14099,9 @@ class BackendBucket(proto.Message): [Output Only] Server-defined URL for the resource. This field is a member of `oneof`_ ``_self_link``. + used_by (MutableSequence[google.cloud.compute_v1.types.BackendBucketUsedBy]): + [Output Only] List of resources referencing that backend + bucket. """ class CompressionMode(proto.Enum): @@ -14180,6 +14185,11 @@ class CompressionMode(proto.Enum): number=456214797, optional=True, ) + used_by: MutableSequence["BackendBucketUsedBy"] = proto.RepeatedField( + proto.MESSAGE, + number=389320729, + message="BackendBucketUsedBy", + ) class BackendBucketCdnPolicy(proto.Message): @@ -14605,6 +14615,26 @@ def raw_page(self): ) +class BackendBucketUsedBy(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + reference (str): + [Output Only] Server-defined URL for UrlMaps referencing + that BackendBucket. + + This field is a member of `oneof`_ ``_reference``. + """ + + reference: str = proto.Field( + proto.STRING, + number=148586315, + optional=True, + ) + + class BackendService(proto.Message): r"""Represents a Backend Service resource. A backend service defines how Google Cloud load balancers distribute traffic. The backend service @@ -14758,6 +14788,34 @@ class BackendService(proto.Message): identifier is defined by the server. This field is a member of `oneof`_ ``_id``. + ip_address_selection_policy (str): + Specifies a preference for traffic sent from the proxy to + the backend (or from the client to the backend for proxyless + gRPC). The possible values are: - IPV4_ONLY: Only send IPv4 + traffic to the backends of the backend service (Instance + Group, Managed Instance Group, Network Endpoint Group), + regardless of traffic from the client to the proxy. Only + IPv4 health checks are used to check the health of the + backends. 
This is the default setting. - PREFER_IPV6: + Prioritize the connection to the endpoint's IPv6 address + over its IPv4 address (provided there is a healthy IPv6 + address). - IPV6_ONLY: Only send IPv6 traffic to the + backends of the backend service (Instance Group, Managed + Instance Group, Network Endpoint Group), regardless of + traffic from the client to the proxy. Only IPv6 health + checks are used to check the health of the backends. This + field is applicable to either: - Advanced global external + Application Load Balancer (load balancing scheme + EXTERNAL_MANAGED), - Regional external Application Load + Balancer, - Internal proxy Network Load Balancer (load + balancing scheme INTERNAL_MANAGED), - Regional internal + Application Load Balancer (load balancing scheme + INTERNAL_MANAGED), - Traffic Director with Envoy proxies and + proxyless gRPC (load balancing scheme + INTERNAL_SELF_MANAGED). Check the IpAddressSelectionPolicy + enum for the list of possible values. + + This field is a member of `oneof`_ ``_ip_address_selection_policy``. kind (str): [Output Only] Type of resource. Always compute#backendService for backend services. @@ -14810,13 +14868,16 @@ class BackendService(proto.Message): load_balancing_scheme set to INTERNAL_MANAGED. - A global backend service with the load_balancing_scheme set to INTERNAL_SELF_MANAGED, INTERNAL_MANAGED, or - EXTERNAL_MANAGED. If sessionAffinity is not NONE, and this - field is not set to MAGLEV or RING_HASH, session affinity - settings will not take effect. Only ROUND_ROBIN and - RING_HASH are supported when the backend service is - referenced by a URL map that is bound to target gRPC proxy - that has validateForProxyless field set to true. Check the - LocalityLbPolicy enum for the list of possible values. + EXTERNAL_MANAGED. If sessionAffinity is not configured—that + is, if session affinity remains at the default value of + NONE—then the default value for localityLbPolicy is + ROUND_ROBIN. 
If session affinity is set to a value other + than NONE, then the default value for localityLbPolicy is + MAGLEV. Only ROUND_ROBIN and RING_HASH are supported when + the backend service is referenced by a URL map that is bound + to target gRPC proxy that has validateForProxyless field set + to true. Check the LocalityLbPolicy enum for the list of + possible values. This field is a member of `oneof`_ ``_locality_lb_policy``. log_config (google.cloud.compute_v1.types.BackendServiceLogConfig): @@ -14972,6 +15033,12 @@ class BackendService(proto.Message): values. This field is a member of `oneof`_ ``_session_affinity``. + strong_session_affinity_cookie (google.cloud.compute_v1.types.BackendServiceHttpCookie): + Describes the HTTP cookie used for stateful session + affinity. This field is applicable and required if the + sessionAffinity is set to STRONG_COOKIE_AFFINITY. + + This field is a member of `oneof`_ ``_strong_session_affinity_cookie``. subsetting (google.cloud.compute_v1.types.Subsetting): This field is a member of `oneof`_ ``_subsetting``. @@ -14991,7 +15058,8 @@ class BackendService(proto.Message): This field is a member of `oneof`_ ``_timeout_sec``. used_by (MutableSequence[google.cloud.compute_v1.types.BackendServiceUsedBy]): - + [Output Only] List of resources referencing given backend + service. """ class CompressionMode(proto.Enum): @@ -15015,6 +15083,60 @@ class CompressionMode(proto.Enum): AUTOMATIC = 165298699 DISABLED = 516696700 + class IpAddressSelectionPolicy(proto.Enum): + r"""Specifies a preference for traffic sent from the proxy to the + backend (or from the client to the backend for proxyless gRPC). The + possible values are: - IPV4_ONLY: Only send IPv4 traffic to the + backends of the backend service (Instance Group, Managed Instance + Group, Network Endpoint Group), regardless of traffic from the + client to the proxy. Only IPv4 health checks are used to check the + health of the backends. This is the default setting. 
- PREFER_IPV6: + Prioritize the connection to the endpoint's IPv6 address over its + IPv4 address (provided there is a healthy IPv6 address). - + IPV6_ONLY: Only send IPv6 traffic to the backends of the backend + service (Instance Group, Managed Instance Group, Network Endpoint + Group), regardless of traffic from the client to the proxy. Only + IPv6 health checks are used to check the health of the backends. + This field is applicable to either: - Advanced global external + Application Load Balancer (load balancing scheme EXTERNAL_MANAGED), + - Regional external Application Load Balancer, - Internal proxy + Network Load Balancer (load balancing scheme INTERNAL_MANAGED), - + Regional internal Application Load Balancer (load balancing scheme + INTERNAL_MANAGED), - Traffic Director with Envoy proxies and + proxyless gRPC (load balancing scheme INTERNAL_SELF_MANAGED). + + Values: + UNDEFINED_IP_ADDRESS_SELECTION_POLICY (0): + A value indicating that the enum field is not + set. + IPV4_ONLY (22373798): + Only send IPv4 traffic to the backends of the + Backend Service (Instance Group, Managed + Instance Group, Network Endpoint Group) + regardless of traffic from the client to the + proxy. Only IPv4 health-checks are used to check + the health of the backends. This is the default + setting. + IPV6_ONLY (79632100): + Only send IPv6 traffic to the backends of the + Backend Service (Instance Group, Managed + Instance Group, Network Endpoint Group) + regardless of traffic from the client to the + proxy. Only IPv6 health-checks are used to check + the health of the backends. + IP_ADDRESS_SELECTION_POLICY_UNSPECIFIED (36210144): + Unspecified IP address selection policy. + PREFER_IPV6 (408601302): + Prioritize the connection to the endpoints + IPv6 address over its IPv4 address (provided + there is a healthy IPv6 address). 
+ """ + UNDEFINED_IP_ADDRESS_SELECTION_POLICY = 0 + IPV4_ONLY = 22373798 + IPV6_ONLY = 79632100 + IP_ADDRESS_SELECTION_POLICY_UNSPECIFIED = 36210144 + PREFER_IPV6 = 408601302 + class LoadBalancingScheme(proto.Enum): r"""Specifies the load balancer type. A backend service created for one type of load balancer cannot be used with another. For @@ -15078,8 +15200,10 @@ class LocalityLbPolicy(proto.Enum): INTERNAL_MANAGED. - A global backend service with the load_balancing_scheme set to INTERNAL_SELF_MANAGED, INTERNAL_MANAGED, or EXTERNAL_MANAGED. If sessionAffinity is not - NONE, and this field is not set to MAGLEV or RING_HASH, session - affinity settings will not take effect. Only ROUND_ROBIN and + configured—that is, if session affinity remains at the default value + of NONE—then the default value for localityLbPolicy is ROUND_ROBIN. + If session affinity is set to a value other than NONE, then the + default value for localityLbPolicy is MAGLEV. Only ROUND_ROBIN and RING_HASH are supported when the backend service is referenced by a URL map that is bound to target gRPC proxy that has validateForProxyless field set to true. @@ -15243,6 +15367,11 @@ class SessionAffinity(proto.Enum): No session affinity. Connections from the same client IP may go to any instance in the pool. + STRONG_COOKIE_AFFINITY (438628091): + Strong cookie-based affinity. Connections + bearing the same cookie will be served by the + same backend VM while that VM remains healthy, + as long as the cookie has not expired. 
""" UNDEFINED_SESSION_AFFINITY = 0 CLIENT_IP = 345665051 @@ -15253,6 +15382,7 @@ class SessionAffinity(proto.Enum): HEADER_FIELD = 200737960 HTTP_COOKIE = 494981627 NONE = 2402104 + STRONG_COOKIE_AFFINITY = 438628091 affinity_cookie_ttl_sec: int = proto.Field( proto.INT32, @@ -15353,6 +15483,11 @@ class SessionAffinity(proto.Enum): number=3355, optional=True, ) + ip_address_selection_policy: str = proto.Field( + proto.STRING, + number=77600840, + optional=True, + ) kind: str = proto.Field( proto.STRING, number=3292052, @@ -15458,6 +15593,12 @@ class SessionAffinity(proto.Enum): number=463888561, optional=True, ) + strong_session_affinity_cookie: "BackendServiceHttpCookie" = proto.Field( + proto.MESSAGE, + number=238195722, + optional=True, + message="BackendServiceHttpCookie", + ) subsetting: "Subsetting" = proto.Field( proto.MESSAGE, number=450283536, @@ -16141,6 +16282,44 @@ class BackendServiceGroupHealth(proto.Message): ) +class BackendServiceHttpCookie(proto.Message): + r"""The HTTP cookie used for stateful session affinity. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Name of the cookie. + + This field is a member of `oneof`_ ``_name``. + path (str): + Path to set for the cookie. + + This field is a member of `oneof`_ ``_path``. + ttl (google.cloud.compute_v1.types.Duration): + Lifetime of the cookie. + + This field is a member of `oneof`_ ``_ttl``. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=3373707, + optional=True, + ) + path: str = proto.Field( + proto.STRING, + number=3433509, + optional=True, + ) + ttl: "Duration" = proto.Field( + proto.MESSAGE, + number=115180, + optional=True, + message="Duration", + ) + + class BackendServiceIAP(proto.Message): r"""Identity-Aware Proxy @@ -16621,6 +16800,9 @@ class BackendServiceUsedBy(proto.Message): Attributes: reference (str): + [Output Only] Server-defined URL for resources referencing + given BackendService like UrlMaps, TargetTcpProxies, + TargetSslProxies and ForwardingRule. This field is a member of `oneof`_ ``_reference``. """ @@ -17193,8 +17375,6 @@ class Binding(proto.Message): Attributes: binding_id (str): - This is deprecated and has no effect. Do not - use. This field is a member of `oneof`_ ``_binding_id``. condition (google.cloud.compute_v1.types.Expr): @@ -17464,8 +17644,8 @@ class BulkInsertInstanceResource(proto.Message): This field is a member of `oneof`_ ``_instance_properties``. location_policy (google.cloud.compute_v1.types.LocationPolicy): - Policy for chosing target zone. For more - information, see Create VMs in bulk . + Policy for choosing target zone. For more + information, see Create VMs in bulk. This field is a member of `oneof`_ ``_location_policy``. min_count (int): @@ -18243,6 +18423,13 @@ class Commitment(proto.Message): [Output Only] Creation timestamp in RFC3339 text format. This field is a member of `oneof`_ ``_creation_timestamp``. + custom_end_timestamp (str): + [Input Only] Optional, specifies the CUD end time requested + by the customer in RFC3339 text format. Needed when the + customer wants CUD's end date is later than the start date + + term duration. + + This field is a member of `oneof`_ ``_custom_end_timestamp``. description (str): An optional description of this resource. 
Provide this property when you create the @@ -18306,6 +18493,10 @@ class Commitment(proto.Message): reservations (MutableSequence[google.cloud.compute_v1.types.Reservation]): List of create-on-create reservations for this commitment. + resource_status (google.cloud.compute_v1.types.CommitmentResourceStatus): + [Output Only] Status information for Commitment resource. + + This field is a member of `oneof`_ ``_resource_status``. resources (MutableSequence[google.cloud.compute_v1.types.ResourceCommitment]): A list of commitment amounts for particular resources. Note that VCPU and MEMORY resource @@ -18450,6 +18641,8 @@ class Type(proto.Enum): No description available. GENERAL_PURPOSE (299793543): No description available. + GENERAL_PURPOSE_C4 (301911817): + No description available. GENERAL_PURPOSE_E2 (301911877): No description available. GENERAL_PURPOSE_N2 (301912156): @@ -18481,6 +18674,7 @@ class Type(proto.Enum): COMPUTE_OPTIMIZED_C3D = 383246484 COMPUTE_OPTIMIZED_H3 = 428004939 GENERAL_PURPOSE = 299793543 + GENERAL_PURPOSE_C4 = 301911817 GENERAL_PURPOSE_E2 = 301911877 GENERAL_PURPOSE_N2 = 301912156 GENERAL_PURPOSE_N2D = 232471400 @@ -18507,6 +18701,11 @@ class Type(proto.Enum): number=30525366, optional=True, ) + custom_end_timestamp: str = proto.Field( + proto.STRING, + number=181770852, + optional=True, + ) description: str = proto.Field( proto.STRING, number=422937596, @@ -18561,6 +18760,12 @@ class Type(proto.Enum): number=399717927, message="Reservation", ) + resource_status: "CommitmentResourceStatus" = proto.Field( + proto.MESSAGE, + number=249429315, + optional=True, + message="CommitmentResourceStatus", + ) resources: MutableSequence["ResourceCommitment"] = proto.RepeatedField( proto.MESSAGE, number=164412965, @@ -18754,6 +18959,28 @@ def raw_page(self): ) +class CommitmentResourceStatus(proto.Message): + r"""[Output Only] Contains output only fields. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + custom_term_eligibility_end_timestamp (str): + [Output Only] Indicates the end time of customer's + eligibility to send custom term requests in RFC3339 text + format. Term extension requests that (not the end time in + the request) after this time will be rejected. + + This field is a member of `oneof`_ ``_custom_term_eligibility_end_timestamp``. + """ + + custom_term_eligibility_end_timestamp: str = proto.Field( + proto.STRING, + number=363976187, + optional=True, + ) + + class CommitmentsScopedList(proto.Message): r""" @@ -18782,181 +19009,55 @@ class CommitmentsScopedList(proto.Message): ) -class Condition(proto.Message): - r"""This is deprecated and has no effect. Do not use. +class ConfidentialInstanceConfig(proto.Message): + r"""A set of Confidential Instance options. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields Attributes: - iam (str): - This is deprecated and has no effect. Do not - use. Check the Iam enum for the list of possible - values. - - This field is a member of `oneof`_ ``_iam``. - op (str): - This is deprecated and has no effect. Do not - use. Check the Op enum for the list of possible - values. + confidential_instance_type (str): + Defines the type of technology used by the + confidential instance. Check the + ConfidentialInstanceType enum for the list of + possible values. - This field is a member of `oneof`_ ``_op``. - svc (str): - This is deprecated and has no effect. Do not - use. - - This field is a member of `oneof`_ ``_svc``. - sys (str): - This is deprecated and has no effect. Do not - use. Check the Sys enum for the list of possible - values. + This field is a member of `oneof`_ ``_confidential_instance_type``. 
+ enable_confidential_compute (bool): + Defines whether the instance should have + confidential compute enabled. - This field is a member of `oneof`_ ``_sys``. - values (MutableSequence[str]): - This is deprecated and has no effect. Do not - use. + This field is a member of `oneof`_ ``_enable_confidential_compute``. """ - class Iam(proto.Enum): - r"""This is deprecated and has no effect. Do not use. Additional - supported values which may be not listed in the enum directly due to - technical reasons: NO_ATTR - - Values: - UNDEFINED_IAM (0): - A value indicating that the enum field is not - set. - APPROVER (357258949): - This is deprecated and has no effect. Do not - use. - ATTRIBUTION (232465503): - This is deprecated and has no effect. Do not - use. - AUTHORITY (504108835): - This is deprecated and has no effect. Do not - use. - CREDENTIALS_TYPE (348222141): - This is deprecated and has no effect. Do not - use. - CREDS_ASSERTION (332343272): - This is deprecated and has no effect. Do not - use. - JUSTIFICATION_TYPE (206147719): - This is deprecated and has no effect. Do not - use. - SECURITY_REALM (526269616): - This is deprecated and has no effect. Do not - use. - """ - UNDEFINED_IAM = 0 - APPROVER = 357258949 - ATTRIBUTION = 232465503 - AUTHORITY = 504108835 - CREDENTIALS_TYPE = 348222141 - CREDS_ASSERTION = 332343272 - JUSTIFICATION_TYPE = 206147719 - SECURITY_REALM = 526269616 - - class Op(proto.Enum): - r"""This is deprecated and has no effect. Do not use. - - Values: - UNDEFINED_OP (0): - A value indicating that the enum field is not - set. - DISCHARGED (266338274): - This is deprecated and has no effect. Do not - use. - EQUALS (442201023): - This is deprecated and has no effect. Do not - use. - IN (2341): - This is deprecated and has no effect. Do not - use. - NOT_EQUALS (19718859): - This is deprecated and has no effect. Do not - use. - NOT_IN (161144369): - This is deprecated and has no effect. Do not - use. 
- NO_OP (74481951): - This is deprecated and has no effect. Do not - use. - """ - UNDEFINED_OP = 0 - DISCHARGED = 266338274 - EQUALS = 442201023 - IN = 2341 - NOT_EQUALS = 19718859 - NOT_IN = 161144369 - NO_OP = 74481951 - - class Sys(proto.Enum): - r"""This is deprecated and has no effect. Do not use. Additional - supported values which may be not listed in the enum directly due to - technical reasons: NO_ATTR + class ConfidentialInstanceType(proto.Enum): + r"""Defines the type of technology used by the confidential + instance. Values: - UNDEFINED_SYS (0): + UNDEFINED_CONFIDENTIAL_INSTANCE_TYPE (0): A value indicating that the enum field is not set. - IP (2343): - This is deprecated and has no effect. Do not - use. - NAME (2388619): - This is deprecated and has no effect. Do not - use. - REGION (266017524): - This is deprecated and has no effect. Do not - use. - SERVICE (17781397): - This is deprecated and has no effect. Do not - use. + CONFIDENTIAL_INSTANCE_TYPE_UNSPECIFIED (115021829): + No type specified. Do not use this value. + SEV (81988): + AMD Secure Encrypted Virtualization. + SEV_SNP (21753562): + AMD Secure Encrypted Virtualization - Secure + Nested Paging. + TDX (82920): + Intel Trust Domain eXtension. 
""" - UNDEFINED_SYS = 0 - IP = 2343 - NAME = 2388619 - REGION = 266017524 - SERVICE = 17781397 + UNDEFINED_CONFIDENTIAL_INSTANCE_TYPE = 0 + CONFIDENTIAL_INSTANCE_TYPE_UNSPECIFIED = 115021829 + SEV = 81988 + SEV_SNP = 21753562 + TDX = 82920 - iam: str = proto.Field( - proto.STRING, - number=104021, - optional=True, - ) - op: str = proto.Field( - proto.STRING, - number=3553, - optional=True, - ) - svc: str = proto.Field( - proto.STRING, - number=114272, - optional=True, - ) - sys: str = proto.Field( + confidential_instance_type: str = proto.Field( proto.STRING, - number=114381, + number=43484717, optional=True, ) - values: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=249928994, - ) - - -class ConfidentialInstanceConfig(proto.Message): - r"""A set of Confidential Instance options. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - enable_confidential_compute (bool): - Defines whether the instance should have - confidential compute enabled. - - This field is a member of `oneof`_ ``_enable_confidential_compute``. - """ - enable_confidential_compute: bool = proto.Field( proto.BOOL, number=102135228, @@ -19429,6 +19530,122 @@ class CreateSnapshotRegionDiskRequest(proto.Message): ) +class CustomErrorResponsePolicy(proto.Message): + r"""Specifies the custom error response policy that must be + applied when the backend service or backend bucket responds with + an error. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + error_response_rules (MutableSequence[google.cloud.compute_v1.types.CustomErrorResponsePolicyCustomErrorResponseRule]): + Specifies rules for returning error + responses. 
In a given policy, if you specify + rules for both a range of error codes as well as + rules for specific error codes then rules with + specific error codes have a higher priority. For + example, assume that you configure a rule for + 401 (Un-authorized) code, and another for all 4 + series error codes (4XX). If the backend service + returns a 401, then the rule for 401 will be + applied. However if the backend service returns + a 403, the rule for 4xx takes effect. + error_service (str): + The full or partial URL to the BackendBucket resource that + contains the custom error content. Examples are: - + https://www.googleapis.com/compute/v1/projects/project/global/backendBuckets/myBackendBucket + - + compute/v1/projects/project/global/backendBuckets/myBackendBucket + - global/backendBuckets/myBackendBucket If errorService is + not specified at lower levels like pathMatcher, pathRule and + routeRule, an errorService specified at a higher level in + the UrlMap will be used. If + UrlMap.defaultCustomErrorResponsePolicy contains one or more + errorResponseRules[], it must specify errorService. If load + balancer cannot reach the backendBucket, a simple Not Found + Error will be returned, with the original response code (or + overrideResponseCode if configured). errorService is not + supported for internal or regional HTTP/HTTPS load + balancers. + + This field is a member of `oneof`_ ``_error_service``. 
+ """ + + error_response_rules: MutableSequence[ + "CustomErrorResponsePolicyCustomErrorResponseRule" + ] = proto.RepeatedField( + proto.MESSAGE, + number=14620304, + message="CustomErrorResponsePolicyCustomErrorResponseRule", + ) + error_service: str = proto.Field( + proto.STRING, + number=164214654, + optional=True, + ) + + +class CustomErrorResponsePolicyCustomErrorResponseRule(proto.Message): + r"""Specifies the mapping between the response code that will be + returned along with the custom error content and the response + code returned by the backend service. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + match_response_codes (MutableSequence[str]): + Valid values include: - A number between 400 + and 599: For example 401 or 503, in which case + the load balancer applies the policy if the + error code exactly matches this value. - 5xx: + Load Balancer will apply the policy if the + backend service responds with any response code + in the range of 500 to 599. - 4xx: Load Balancer + will apply the policy if the backend service + responds with any response code in the range of + 400 to 499. Values must be unique within + matchResponseCodes and across all + errorResponseRules of CustomErrorResponsePolicy. + override_response_code (int): + The HTTP status code returned with the + response containing the custom error content. If + overrideResponseCode is not supplied, the same + response code returned by the original backend + bucket or backend service is returned to the + client. + + This field is a member of `oneof`_ ``_override_response_code``. + path (str): + The full path to a file within backendBucket + . For example: /errors/defaultError.html path + must start with a leading slash. path cannot + have trailing slashes. 
If the file is not + available in backendBucket or the load balancer + cannot reach the BackendBucket, a simple Not + Found Error is returned to the client. The value + must be from 1 to 1024 characters + + This field is a member of `oneof`_ ``_path``. + """ + + match_response_codes: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=104973410, + ) + override_response_code: int = proto.Field( + proto.INT32, + number=530328568, + optional=True, + ) + path: str = proto.Field( + proto.STRING, + number=3433509, + optional=True, + ) + + class CustomerEncryptionKey(proto.Message): r""" @@ -20244,7 +20461,8 @@ class DeleteGlobalOperationRequest(proto.Message): Attributes: operation (str): - Name of the Operations resource to delete. + Name of the Operations resource to delete, or + its unique numeric identifier. project (str): Project ID for this request. """ @@ -20275,7 +20493,8 @@ class DeleteGlobalOrganizationOperationRequest(proto.Message): Attributes: operation (str): - Name of the Operations resource to delete. + Name of the Operations resource to delete, or + its unique numeric identifier. parent_id (str): Parent ID for this request. @@ -22342,7 +22561,8 @@ class DeleteRegionOperationRequest(proto.Message): Attributes: operation (str): - Name of the Operations resource to delete. + Name of the Operations resource to delete, or + its unique numeric identifier. project (str): Project ID for this request. region (str): @@ -23960,7 +24180,8 @@ class DeleteZoneOperationRequest(proto.Message): Attributes: operation (str): - Name of the Operations resource to delete. + Name of the Operations resource to delete, or + its unique numeric identifier. project (str): Project ID for this request. zone (str): @@ -24537,6 +24758,17 @@ class Disk(proto.Message): .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields Attributes: + access_mode (str): + The access mode of the disk. 
- READ_WRITE_SINGLE: The + default AccessMode, means the disk can be attached to single + instance in RW mode. - READ_WRITE_MANY: The AccessMode means + the disk can be attached to multiple instances in RW mode. - + READ_ONLY_MANY: The AccessMode means the disk can be + attached to multiple instances in RO mode. The AccessMode is + only valid for Hyperdisk disk types. Check the AccessMode + enum for the list of possible values. + + This field is a member of `oneof`_ ``_access_mode``. architecture (str): The architecture of the disk. Valid values are ARM64 or X86_64. Check the Architecture enum for the list of possible @@ -24900,6 +25132,33 @@ class Disk(proto.Message): This field is a member of `oneof`_ ``_zone``. """ + class AccessMode(proto.Enum): + r"""The access mode of the disk. - READ_WRITE_SINGLE: The default + AccessMode, means the disk can be attached to single instance in RW + mode. - READ_WRITE_MANY: The AccessMode means the disk can be + attached to multiple instances in RW mode. - READ_ONLY_MANY: The + AccessMode means the disk can be attached to multiple instances in + RO mode. The AccessMode is only valid for Hyperdisk disk types. + + Values: + UNDEFINED_ACCESS_MODE (0): + A value indicating that the enum field is not + set. + READ_ONLY_MANY (63460265): + The AccessMode means the disk can be attached + to multiple instances in RO mode. + READ_WRITE_MANY (488743208): + The AccessMode means the disk can be attached + to multiple instances in RW mode. + READ_WRITE_SINGLE (99323089): + The default AccessMode, means the disk can be + attached to single instance in RW mode. + """ + UNDEFINED_ACCESS_MODE = 0 + READ_ONLY_MANY = 63460265 + READ_WRITE_MANY = 488743208 + READ_WRITE_SINGLE = 99323089 + class Architecture(proto.Enum): r"""The architecture of the disk. Valid values are ARM64 or X86_64. 
@@ -24952,6 +25211,11 @@ class Status(proto.Enum): RESTORING = 404263851 UNAVAILABLE = 413756464 + access_mode: str = proto.Field( + proto.STRING, + number=41155486, + optional=True, + ) architecture: str = proto.Field( proto.STRING, number=302803283, @@ -28017,7 +28281,10 @@ class FirewallPolicyRule(proto.Message): Attributes: action (str): The Action to perform when the client connection triggers - the rule. Valid actions are "allow", "deny" and "goto_next". + the rule. Valid actions for firewall rules are: "allow", + "deny", "apply_security_profile_group" and "goto_next". + Valid actions for packet mirroring rules are: "mirror", + "do_not_mirror" and "goto_next". This field is a member of `oneof`_ ``_action``. description (str): @@ -28047,8 +28314,9 @@ class FirewallPolicyRule(proto.Message): This field is a member of `oneof`_ ``_enable_logging``. kind (str): - [Output only] Type of the resource. Always - compute#firewallPolicyRule for firewall policy rules + [Output only] Type of the resource. Returns + compute#firewallPolicyRule for firewall rules and + compute#packetMirroringRule for packet mirroring rules. This field is a member of `oneof`_ ``_kind``. match (google.cloud.compute_v1.types.FirewallPolicyRuleMatcher): @@ -28063,7 +28331,7 @@ class FirewallPolicyRule(proto.Message): value between 0 and 2147483647. Rules are evaluated from highest to lowest priority where 0 is the highest priority and 2147483647 is the - lowest prority. + lowest priority. This field is a member of `oneof`_ ``_priority``. rule_name (str): @@ -28080,8 +28348,9 @@ class FirewallPolicyRule(proto.Message): A fully-qualified URL of a SecurityProfile resource instance. Example: https://networksecurity.googleapis.com/v1/projects/{project}/locations/{location}/securityProfileGroups/my-security-profile-group - Must be specified if action = 'apply_security_profile_group' - and cannot be specified for other actions. 
+ Must be specified if action is one of + 'apply_security_profile_group' or 'mirror'. Cannot be + specified for other actions. This field is a member of `oneof`_ ``_security_profile_group``. target_resources (MutableSequence[str]): @@ -30124,7 +30393,8 @@ class GetGlobalOperationRequest(proto.Message): Attributes: operation (str): - Name of the Operations resource to return. + Name of the Operations resource to return, or + its unique numeric identifier. project (str): Project ID for this request. """ @@ -30148,7 +30418,8 @@ class GetGlobalOrganizationOperationRequest(proto.Message): Attributes: operation (str): - Name of the Operations resource to return. + Name of the Operations resource to return, or + its unique numeric identifier. parent_id (str): Parent ID for this request. @@ -32568,7 +32839,8 @@ class GetRegionOperationRequest(proto.Message): Attributes: operation (str): - Name of the Operations resource to return. + Name of the Operations resource to return, or + its unique numeric identifier. project (str): Project ID for this request. region (str): @@ -33884,7 +34156,8 @@ class GetZoneOperationRequest(proto.Message): Attributes: operation (str): - Name of the Operations resource to return. + Name of the Operations resource to return, or + its unique numeric identifier. project (str): Project ID for this request. zone (str): @@ -34290,6 +34563,8 @@ class Type(proto.Enum): No description available. SEV_SNP_CAPABLE (426919): No description available. + TDX_CAPABLE (240446133): + No description available. UEFI_COMPATIBLE (195865408): No description available. VIRTIO_SCSI_MULTIQUEUE (201597069): @@ -34307,6 +34582,7 @@ class Type(proto.Enum): SEV_LIVE_MIGRATABLE = 392039820 SEV_LIVE_MIGRATABLE_V2 = 168551983 SEV_SNP_CAPABLE = 426919 + TDX_CAPABLE = 240446133 UEFI_COMPATIBLE = 195865408 VIRTIO_SCSI_MULTIQUEUE = 201597069 WINDOWS = 456863331 @@ -34946,6 +35222,24 @@ class HealthCheck(proto.Message): [Output Only] Server-defined URL for the resource. 
This field is a member of `oneof`_ ``_self_link``. + source_regions (MutableSequence[str]): + The list of cloud regions from which health + checks are performed. If any regions are + specified, then exactly 3 regions should be + specified. The region names must be valid names + of Google Cloud regions. This can only be set + for global health check. If this list is + non-empty, then there are restrictions on what + other health check fields are supported and what + other resources can use this health check: - + SSL, HTTP2, and GRPC protocols are not + supported. - The TCP request field is not + supported. - The proxyHeader field for HTTP, + HTTPS, and TCP is not supported. - The + checkIntervalSec field must be at least 30. - + The health check cannot be used with + BackendService nor with managed instance group + auto-healing. ssl_health_check (google.cloud.compute_v1.types.SSLHealthCheck): This field is a member of `oneof`_ ``_ssl_health_check``. @@ -35085,6 +35379,10 @@ class Type(proto.Enum): number=456214797, optional=True, ) + source_regions: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=405568475, + ) ssl_health_check: "SSLHealthCheck" = proto.Field( proto.MESSAGE, number=280032440, @@ -35675,6 +35973,15 @@ class HealthStatus(proto.Message): ip. This field is a member of `oneof`_ ``_ip_address``. + ipv6_address (str): + + This field is a member of `oneof`_ ``_ipv6_address``. + ipv6_health_state (str): + Health state of the IPv6 address of the + instance. Check the Ipv6HealthState enum for the + list of possible values. + + This field is a member of `oneof`_ ``_ipv6_health_state``. port (int): The named port of the instance group, not necessarily the port that is health-checked. @@ -35692,6 +35999,11 @@ class HealthStatus(proto.Message): class HealthState(proto.Enum): r"""Health state of the IPv4 address of the instance. 
+ Additional supported values which may be not listed in the enum + directly due to technical reasons: + + HEALTHY + UNHEALTHY Values: UNDEFINED_HEALTH_STATE (0): @@ -35706,6 +36018,21 @@ class HealthState(proto.Enum): HEALTHY = 439801213 UNHEALTHY = 462118084 + class Ipv6HealthState(proto.Enum): + r"""Health state of the IPv6 address of the instance. + Additional supported values which may be not listed in the enum + directly due to technical reasons: + + HEALTHY + UNHEALTHY + + Values: + UNDEFINED_IPV6_HEALTH_STATE (0): + A value indicating that the enum field is not + set. + """ + UNDEFINED_IPV6_HEALTH_STATE = 0 + class WeightError(proto.Enum): r""" @@ -35776,6 +36103,16 @@ class WeightError(proto.Enum): number=406272220, optional=True, ) + ipv6_address: str = proto.Field( + proto.STRING, + number=341563804, + optional=True, + ) + ipv6_health_state: str = proto.Field( + proto.STRING, + number=190316614, + optional=True, + ) port: int = proto.Field( proto.INT32, number=3446913, @@ -35826,24 +36163,37 @@ class HealthStatusForNetworkEndpoint(proto.Message): list of possible values. This field is a member of `oneof`_ ``_health_state``. + ipv6_health_state (str): + Health state of the ipv6 network endpoint + determined based on the health checks + configured. Check the Ipv6HealthState enum for + the list of possible values. + + This field is a member of `oneof`_ ``_ipv6_health_state``. """ class HealthState(proto.Enum): r"""Health state of the network endpoint determined based on the - health checks configured. + health checks configured. Additional supported values which may + be not listed in the enum directly due to technical reasons: + + DRAINING + HEALTHY + UNHEALTHY + UNKNOWN Values: UNDEFINED_HEALTH_STATE (0): A value indicating that the enum field is not set. DRAINING (480455402): - Endpoint is being drained. + No description available. HEALTHY (439801213): - Endpoint is healthy. + No description available. UNHEALTHY (462118084): - Endpoint is unhealthy. 
+ No description available. UNKNOWN (433141802): - Health status of the endpoint is unknown. + No description available. """ UNDEFINED_HEALTH_STATE = 0 DRAINING = 480455402 @@ -35851,6 +36201,23 @@ class HealthState(proto.Enum): UNHEALTHY = 462118084 UNKNOWN = 433141802 + class Ipv6HealthState(proto.Enum): + r"""Health state of the ipv6 network endpoint determined based on + the health checks configured. Additional supported values which + may be not listed in the enum directly due to technical reasons: + + DRAINING + HEALTHY + UNHEALTHY + UNKNOWN + + Values: + UNDEFINED_IPV6_HEALTH_STATE (0): + A value indicating that the enum field is not + set. + """ + UNDEFINED_IPV6_HEALTH_STATE = 0 + backend_service: "BackendServiceReference" = proto.Field( proto.MESSAGE, number=306946058, @@ -35880,6 +36247,11 @@ class HealthState(proto.Enum): number=324007150, optional=True, ) + ipv6_health_state: str = proto.Field( + proto.STRING, + number=190316614, + optional=True, + ) class Help(proto.Message): @@ -36737,6 +37109,40 @@ class HttpRouteRule(proto.Message): .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields Attributes: + custom_error_response_policy (google.cloud.compute_v1.types.CustomErrorResponsePolicy): + customErrorResponsePolicy specifies how the Load Balancer + returns error responses when BackendServiceor BackendBucket + responds with an error. If a policy for an error code is not + configured for the RouteRule, a policy for the error code + configured in pathMatcher.defaultCustomErrorResponsePolicy + is applied. If one is not specified in + pathMatcher.defaultCustomErrorResponsePolicy, the policy + configured in UrlMap.defaultCustomErrorResponsePolicy takes + effect. 
For example, consider a UrlMap with the following + configuration: - UrlMap.defaultCustomErrorResponsePolicy are + configured with policies for 5xx and 4xx errors - A + RouteRule for /coming_soon/ is configured for the error code + 404. If the request is for www.myotherdomain.com and a 404 + is encountered, the policy under + UrlMap.defaultCustomErrorResponsePolicy takes effect. If a + 404 response is encountered for the request + www.example.com/current_events/, the pathMatcher's policy + takes effect. If however, the request for + www.example.com/coming_soon/ encounters a 404, the policy in + RouteRule.customErrorResponsePolicy takes effect. If any of + the requests in this example encounter a 500 error code, the + policy at UrlMap.defaultCustomErrorResponsePolicy takes + effect. When used in conjunction with + routeRules.routeAction.retryPolicy, retries take precedence. + Only once all retries are exhausted, the + customErrorResponsePolicy is applied. While attempting a + retry, if load balancer is successful in reaching the + service, the customErrorResponsePolicy is ignored and the + response from the service is returned to the client. + customErrorResponsePolicy is supported only for global + external Application Load Balancers. + + This field is a member of `oneof`_ ``_custom_error_response_policy``. description (str): The short description conveying the intent of this routeRule. The description can have a @@ -36825,6 +37231,12 @@ class HttpRouteRule(proto.Message): This field is a member of `oneof`_ ``_url_redirect``. """ + custom_error_response_policy: "CustomErrorResponsePolicy" = proto.Field( + proto.MESSAGE, + number=202816619, + optional=True, + message="CustomErrorResponsePolicy", + ) description: str = proto.Field( proto.STRING, number=422937596, @@ -42804,12 +43216,18 @@ class InstanceGroupManager(proto.Message): The autohealing policy for this managed instance group. You can specify only one value. 
base_instance_name (str): - The base instance name to use for instances - in this group. The value must be 1-58 characters - long. Instances are named by appending a hyphen - and a random four-character string to the base - instance name. The base instance name must - comply with RFC1035. + The base instance name is a prefix that you want to attach + to the names of all VMs in a MIG. The maximum character + length is 58 and the name must comply with RFC1035 format. + When a VM is created in the group, the MIG appends a hyphen + and a random four-character string to the base instance + name. If you want the MIG to assign sequential numbers + instead of a random string, then end the base instance name + with a hyphen followed by one or more hash symbols. The hash + symbols indicate the number of digits. For example, a base + instance name of "vm-###" results in "vm-001" as a VM name. + @pattern + `a-z <([-a-z0-9]{0,57})|([-a-z0-9]{0,51}-#{1,10}(\[[0-9]{1,10}\])?)>`__ This field is a member of `oneof`_ ``_base_instance_name``. creation_timestamp (str): @@ -42850,6 +43268,13 @@ class InstanceGroupManager(proto.Message): The server generates this identifier. This field is a member of `oneof`_ ``_id``. + instance_flexibility_policy (google.cloud.compute_v1.types.InstanceGroupManagerInstanceFlexibilityPolicy): + Instance flexibility allowing MIG to create + VMs from multiple types of machines. Instance + flexibility configuration on MIG overrides + instance template configuration. + + This field is a member of `oneof`_ ``_instance_flexibility_policy``. instance_group (str): [Output Only] The URL of the Instance Group resource. @@ -42890,14 +43315,21 @@ class InstanceGroupManager(proto.Message): This field is a member of `oneof`_ ``_name``. named_ports (MutableSequence[google.cloud.compute_v1.types.NamedPort]): - Named ports configured for the Instance - Groups complementary to this Instance Group - Manager. 
+ [Output Only] Named ports configured on the Instance Groups + complementary to this Instance Group Manager. region (str): [Output Only] The URL of the region where the managed instance group resides (for regional resources). This field is a member of `oneof`_ ``_region``. + satisfies_pzi (bool): + [Output Only] Reserved for future use. + + This field is a member of `oneof`_ ``_satisfies_pzi``. + satisfies_pzs (bool): + [Output Only] Reserved for future use. + + This field is a member of `oneof`_ ``_satisfies_pzs``. self_link (str): [Output Only] The URL for this managed instance group. The server defines this URL. @@ -43023,6 +43455,14 @@ class ListManagedInstancesResults(proto.Enum): number=3355, optional=True, ) + instance_flexibility_policy: "InstanceGroupManagerInstanceFlexibilityPolicy" = ( + proto.Field( + proto.MESSAGE, + number=26937090, + optional=True, + message="InstanceGroupManagerInstanceFlexibilityPolicy", + ) + ) instance_group: str = proto.Field( proto.STRING, number=81095253, @@ -43066,6 +43506,16 @@ class ListManagedInstancesResults(proto.Enum): number=138946292, optional=True, ) + satisfies_pzi: bool = proto.Field( + proto.BOOL, + number=480964257, + optional=True, + ) + satisfies_pzs: bool = proto.Field( + proto.BOOL, + number=480964267, + optional=True, + ) self_link: str = proto.Field( proto.STRING, number=456214797, @@ -43427,6 +43877,58 @@ class InstanceGroupManagerAutoHealingPolicy(proto.Message): ) +class InstanceGroupManagerInstanceFlexibilityPolicy(proto.Message): + r""" + + Attributes: + instance_selections (MutableMapping[str, google.cloud.compute_v1.types.InstanceGroupManagerInstanceFlexibilityPolicyInstanceSelection]): + Named instance selections configuring + properties that the group will use when creating + new VMs. 
+ """ + + instance_selections: MutableMapping[ + str, "InstanceGroupManagerInstanceFlexibilityPolicyInstanceSelection" + ] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=22954577, + message="InstanceGroupManagerInstanceFlexibilityPolicyInstanceSelection", + ) + + +class InstanceGroupManagerInstanceFlexibilityPolicyInstanceSelection(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + machine_types (MutableSequence[str]): + Full machine-type names, e.g. + "n1-standard-16". + rank (int): + Preference of this instance selection. Lower + number means higher preference. MIG will first + try to create a VM based on the machine-type + with lowest rank and fallback to next rank based + on availability. Machine types and instance + selections with the same rank have the same + preference. + + This field is a member of `oneof`_ ``_rank``. + """ + + machine_types: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=79720065, + ) + rank: int = proto.Field( + proto.INT32, + number=3492908, + optional=True, + ) + + class InstanceGroupManagerInstanceLifecyclePolicy(proto.Message): r""" @@ -43639,7 +44141,8 @@ class InstanceGroupManagerResizeRequest(proto.Message): resize_by (int): The number of instances to be created by this resize request. The group's target size will be - increased by this number. + increased by this number. This field cannot be + used together with 'instances'. This field is a member of `oneof`_ ``_resize_by``. self_link (str): @@ -45546,8 +46049,12 @@ class InstanceProperties(proto.Message): Labels to apply to instances that are created from these properties. machine_type (str): - The machine type to use for instances that - are created from these properties. + The machine type to use for instances that are created from + these properties. 
This field only accepts a machine type + name, for example ``n2-standard-4``. If you use the machine + type full or partial URL, for example + ``projects/my-l7ilb-project/zones/us-central1-a/machineTypes/n2-standard-4``, + the request will result in an ``INTERNAL_ERROR``. This field is a member of `oneof`_ ``_machine_type``. metadata (google.cloud.compute_v1.types.Metadata): @@ -46373,7 +46880,7 @@ class InstancesGetEffectiveFirewallsResponse(proto.Message): Attributes: firewall_policys (MutableSequence[google.cloud.compute_v1.types.InstancesGetEffectiveFirewallsResponseEffectiveFirewallPolicy]): - Effective firewalls from firewall policies. + [Output Only] Effective firewalls from firewall policies. firewalls (MutableSequence[google.cloud.compute_v1.types.Firewall]): Effective firewalls on the instance. """ @@ -46407,8 +46914,16 @@ class InstancesGetEffectiveFirewallsResponseEffectiveFirewallPolicy(proto.Messag [Output Only] The name of the firewall policy. This field is a member of `oneof`_ ``_name``. + priority (int): + [Output only] Priority of firewall policy association. Not + applicable for type=HIERARCHY. + + This field is a member of `oneof`_ ``_priority``. rules (MutableSequence[google.cloud.compute_v1.types.FirewallPolicyRule]): - The rules that apply to the network. + [Output Only] The rules that apply to the instance. Only + rules that target the specific VM instance are returned if + target service accounts or target secure tags are specified + in the rules. short_name (str): [Output Only] The short name of the firewall policy. @@ -46437,6 +46952,10 @@ class Type(proto.Enum): No description available. NETWORK_REGIONAL (190804272): No description available. + SYSTEM_GLOBAL (60099507): + No description available. + SYSTEM_REGIONAL (161777199): + No description available. UNSPECIFIED (526786327): No description available. 
""" @@ -46444,6 +46963,8 @@ class Type(proto.Enum): HIERARCHY = 69902869 NETWORK = 413984270 NETWORK_REGIONAL = 190804272 + SYSTEM_GLOBAL = 60099507 + SYSTEM_REGIONAL = 161777199 UNSPECIFIED = 526786327 display_name: str = proto.Field( @@ -46456,6 +46977,11 @@ class Type(proto.Enum): number=3373707, optional=True, ) + priority: int = proto.Field( + proto.INT32, + number=445151652, + optional=True, + ) rules: MutableSequence["FirewallPolicyRule"] = proto.RepeatedField( proto.MESSAGE, number=108873975, @@ -47254,12 +47780,12 @@ class Interconnect(proto.Message): available_features (MutableSequence[str]): [Output only] List of features available for this Interconnect connection, which can take one of the following - values: - MACSEC If present then the Interconnect connection - is provisioned on MACsec capable hardware ports. If not - present then the Interconnect connection is provisioned on - non-MACsec capable ports and MACsec isn't supported and - enabling MACsec fails. Check the AvailableFeatures enum for - the list of possible values. + values: - IF_MACSEC If present then the Interconnect + connection is provisioned on MACsec capable hardware ports. + If not present then the Interconnect connection is + provisioned on non-MACsec capable ports and MACsec isn't + supported and enabling MACsec fails. Check the + AvailableFeatures enum for the list of possible values. circuit_infos (MutableSequence[google.cloud.compute_v1.types.InterconnectCircuitInfo]): [Output Only] A list of CircuitInfo objects, that describe the individual circuits in this LAG. @@ -47420,16 +47946,14 @@ class Interconnect(proto.Message): This field is a member of `oneof`_ ``_remote_location``. requested_features (MutableSequence[str]): - Optional. List of features requested for this - Interconnect connection, which can take one of - the following values: - MACSEC If specified then - the connection is created on MACsec capable - hardware ports. 
If not specified, the default - value is false, which allocates non-MACsec - capable ports first if available. This parameter - can be provided only with Interconnect INSERT. - It isn't valid for Interconnect PATCH. Check the - RequestedFeatures enum for the list of possible + Optional. List of features requested for this Interconnect + connection, which can take one of the following values: - + IF_MACSEC If specified then the connection is created on + MACsec capable hardware ports. If not specified, the default + value is false, which allocates non-MACsec capable ports + first if available. This parameter can be provided only with + Interconnect INSERT. It isn't valid for Interconnect PATCH. + Check the RequestedFeatures enum for the list of possible values. requested_link_count (int): Target number of physical links in the link @@ -66360,212 +66884,6 @@ class LocationPolicyLocationConstraints(proto.Message): ) -class LogConfig(proto.Message): - r"""This is deprecated and has no effect. Do not use. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - cloud_audit (google.cloud.compute_v1.types.LogConfigCloudAuditOptions): - This is deprecated and has no effect. Do not - use. - - This field is a member of `oneof`_ ``_cloud_audit``. - counter (google.cloud.compute_v1.types.LogConfigCounterOptions): - This is deprecated and has no effect. Do not - use. - - This field is a member of `oneof`_ ``_counter``. - data_access (google.cloud.compute_v1.types.LogConfigDataAccessOptions): - This is deprecated and has no effect. Do not - use. - - This field is a member of `oneof`_ ``_data_access``. 
- """ - - cloud_audit: "LogConfigCloudAuditOptions" = proto.Field( - proto.MESSAGE, - number=412852561, - optional=True, - message="LogConfigCloudAuditOptions", - ) - counter: "LogConfigCounterOptions" = proto.Field( - proto.MESSAGE, - number=420959740, - optional=True, - message="LogConfigCounterOptions", - ) - data_access: "LogConfigDataAccessOptions" = proto.Field( - proto.MESSAGE, - number=286633881, - optional=True, - message="LogConfigDataAccessOptions", - ) - - -class LogConfigCloudAuditOptions(proto.Message): - r"""This is deprecated and has no effect. Do not use. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - authorization_logging_options (google.cloud.compute_v1.types.AuthorizationLoggingOptions): - This is deprecated and has no effect. Do not - use. - - This field is a member of `oneof`_ ``_authorization_logging_options``. - log_name (str): - This is deprecated and has no effect. Do not - use. Check the LogName enum for the list of - possible values. - - This field is a member of `oneof`_ ``_log_name``. - """ - - class LogName(proto.Enum): - r"""This is deprecated and has no effect. Do not use. - - Values: - UNDEFINED_LOG_NAME (0): - A value indicating that the enum field is not - set. - ADMIN_ACTIVITY (427503135): - This is deprecated and has no effect. Do not - use. - DATA_ACCESS (238070681): - This is deprecated and has no effect. Do not - use. - UNSPECIFIED_LOG_NAME (410515182): - This is deprecated and has no effect. Do not - use. 
- """ - UNDEFINED_LOG_NAME = 0 - ADMIN_ACTIVITY = 427503135 - DATA_ACCESS = 238070681 - UNSPECIFIED_LOG_NAME = 410515182 - - authorization_logging_options: "AuthorizationLoggingOptions" = proto.Field( - proto.MESSAGE, - number=217861624, - optional=True, - message="AuthorizationLoggingOptions", - ) - log_name: str = proto.Field( - proto.STRING, - number=402913958, - optional=True, - ) - - -class LogConfigCounterOptions(proto.Message): - r"""This is deprecated and has no effect. Do not use. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - custom_fields (MutableSequence[google.cloud.compute_v1.types.LogConfigCounterOptionsCustomField]): - This is deprecated and has no effect. Do not - use. - field (str): - This is deprecated and has no effect. Do not - use. - - This field is a member of `oneof`_ ``_field``. - metric (str): - This is deprecated and has no effect. Do not - use. - - This field is a member of `oneof`_ ``_metric``. - """ - - custom_fields: MutableSequence[ - "LogConfigCounterOptionsCustomField" - ] = proto.RepeatedField( - proto.MESSAGE, - number=249651015, - message="LogConfigCounterOptionsCustomField", - ) - field: str = proto.Field( - proto.STRING, - number=97427706, - optional=True, - ) - metric: str = proto.Field( - proto.STRING, - number=533067184, - optional=True, - ) - - -class LogConfigCounterOptionsCustomField(proto.Message): - r"""This is deprecated and has no effect. Do not use. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - name (str): - This is deprecated and has no effect. Do not - use. - - This field is a member of `oneof`_ ``_name``. - value (str): - This is deprecated and has no effect. Do not - use. - - This field is a member of `oneof`_ ``_value``. 
- """ - - name: str = proto.Field( - proto.STRING, - number=3373707, - optional=True, - ) - value: str = proto.Field( - proto.STRING, - number=111972721, - optional=True, - ) - - -class LogConfigDataAccessOptions(proto.Message): - r"""This is deprecated and has no effect. Do not use. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - log_mode (str): - This is deprecated and has no effect. Do not - use. Check the LogMode enum for the list of - possible values. - - This field is a member of `oneof`_ ``_log_mode``. - """ - - class LogMode(proto.Enum): - r"""This is deprecated and has no effect. Do not use. - - Values: - UNDEFINED_LOG_MODE (0): - A value indicating that the enum field is not - set. - LOG_FAIL_CLOSED (360469778): - This is deprecated and has no effect. Do not - use. - LOG_MODE_UNSPECIFIED (88160822): - This is deprecated and has no effect. Do not - use. - """ - UNDEFINED_LOG_MODE = 0 - LOG_FAIL_CLOSED = 360469778 - LOG_MODE_UNSPECIFIED = 88160822 - - log_mode: str = proto.Field( - proto.STRING, - number=402897342, - optional=True, - ) - - class MachineImage(proto.Message): r"""Represents a machine image resource. A machine image is a Compute Engine resource that stores all the configuration, @@ -66900,6 +67218,11 @@ class MachineType(proto.Message): accelerators (MutableSequence[google.cloud.compute_v1.types.Accelerators]): [Output Only] A list of accelerator configurations assigned to this machine type. + architecture (str): + [Output Only] The architecture of the machine type. Check + the Architecture enum for the list of possible values. + + This field is a member of `oneof`_ ``_architecture``. creation_timestamp (str): [Output Only] Creation timestamp in RFC3339 text format. @@ -66972,11 +67295,36 @@ class MachineType(proto.Message): This field is a member of `oneof`_ ``_zone``. 
""" + class Architecture(proto.Enum): + r"""[Output Only] The architecture of the machine type. + + Values: + UNDEFINED_ARCHITECTURE (0): + A value indicating that the enum field is not + set. + ARCHITECTURE_UNSPECIFIED (394750507): + Default value indicating Architecture is not + set. + ARM64 (62547450): + Machines with architecture ARM64 + X86_64 (425300551): + Machines with architecture X86_64 + """ + UNDEFINED_ARCHITECTURE = 0 + ARCHITECTURE_UNSPECIFIED = 394750507 + ARM64 = 62547450 + X86_64 = 425300551 + accelerators: MutableSequence["Accelerators"] = proto.RepeatedField( proto.MESSAGE, number=269577064, message="Accelerators", ) + architecture: str = proto.Field( + proto.STRING, + number=302803283, + optional=True, + ) creation_timestamp: str = proto.Field( proto.STRING, number=30525366, @@ -67312,6 +67660,11 @@ class ManagedInstance(proto.Message): policy for this instance. This field is a member of `oneof`_ ``_preserved_state_from_policy``. + properties_from_flexibility_policy (google.cloud.compute_v1.types.ManagedInstancePropertiesFromFlexibilityPolicy): + [Output Only] Instance properties selected for this instance + resulting from InstanceFlexibilityPolicy. + + This field is a member of `oneof`_ ``_properties_from_flexibility_policy``. version (google.cloud.compute_v1.types.ManagedInstanceVersion): [Output Only] Intended version of this instance. @@ -67510,6 +67863,12 @@ class InstanceStatus(proto.Enum): optional=True, message="PreservedState", ) + properties_from_flexibility_policy: "ManagedInstancePropertiesFromFlexibilityPolicy" = proto.Field( + proto.MESSAGE, + number=155525825, + optional=True, + message="ManagedInstancePropertiesFromFlexibilityPolicy", + ) version: "ManagedInstanceVersion" = proto.Field( proto.MESSAGE, number=351608024, @@ -67607,6 +67966,26 @@ class ManagedInstanceLastAttempt(proto.Message): ) +class ManagedInstancePropertiesFromFlexibilityPolicy(proto.Message): + r""" + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + machine_type (str): + The machine type to be used for this + instance. + + This field is a member of `oneof`_ ``_machine_type``. + """ + + machine_type: str = proto.Field( + proto.STRING, + number=227711026, + optional=True, + ) + + class ManagedInstanceVersion(proto.Message): r""" @@ -69188,6 +69567,12 @@ class NetworkEndpoint(proto.Message): annotations (MutableMapping[str, str]): Metadata defined as annotations on the network endpoint. + client_destination_port (int): + Represents the port number to which PSC consumer sends + packets. Only valid for network endpoint groups created with + GCE_VM_IP_PORTMAP endpoint type. + + This field is a member of `oneof`_ ``_client_destination_port``. fqdn (str): Optional fully qualified domain name of network endpoint. This can only be specified when @@ -69222,6 +69607,10 @@ class NetworkEndpoint(proto.Message): subnetwork. This field is a member of `oneof`_ ``_ip_address``. + ipv6_address (str): + Optional IPv6 address of network endpoint. + + This field is a member of `oneof`_ ``_ipv6_address``. port (int): Optional port number of network endpoint. If not specified, the defaultPort for the network endpoint group will be used. @@ -69236,6 +69625,11 @@ class NetworkEndpoint(proto.Message): proto.STRING, number=112032548, ) + client_destination_port: int = proto.Field( + proto.INT32, + number=123765766, + optional=True, + ) fqdn: str = proto.Field( proto.STRING, number=3150485, @@ -69251,6 +69645,11 @@ class NetworkEndpoint(proto.Message): number=406272220, optional=True, ) + ipv6_address: str = proto.Field( + proto.STRING, + number=341563804, + optional=True, + ) port: int = proto.Field( proto.INT32, number=3446913, @@ -69395,6 +69794,9 @@ class NetworkEndpointType(proto.Enum): GCE_VM_IP_PORT (501838375): The network endpoint is represented by IP address and port pair. 
+ GCE_VM_IP_PORTMAP (22819253): + The network endpoint is represented by an IP, + Port and Client Destination Port. INTERNET_FQDN_PORT (404154477): The network endpoint is represented by fully qualified domain name and port. @@ -69417,6 +69819,7 @@ class NetworkEndpointType(proto.Enum): UNDEFINED_NETWORK_ENDPOINT_TYPE = 0 GCE_VM_IP = 401880793 GCE_VM_IP_PORT = 501838375 + GCE_VM_IP_PORTMAP = 22819253 INTERNET_FQDN_PORT = 404154477 INTERNET_IP_PORT = 477719963 NON_GCP_PRIVATE_IP_PORT = 336447968 @@ -69862,6 +70265,12 @@ class NetworkEndpointGroupPscData(proto.Message): it to act as an endpoint in L7 PSC-XLB. This field is a member of `oneof`_ ``_consumer_psc_address``. + producer_port (int): + The psc producer port is used to connect PSC NEG with + specific port on the PSC Producer side; should only be used + for the PRIVATE_SERVICE_CONNECT NEG type + + This field is a member of `oneof`_ ``_producer_port``. psc_connection_id (int): [Output Only] The PSC connection id of the PSC Network Endpoint Group Consumer. @@ -69916,6 +70325,11 @@ class PscConnectionStatus(proto.Enum): number=452646572, optional=True, ) + producer_port: int = proto.Field( + proto.INT32, + number=410021134, + optional=True, + ) psc_connection_id: int = proto.Field( proto.UINT64, number=292082397, @@ -70301,6 +70715,8 @@ class NicType(proto.Enum): set. GVNIC (68209305): GVNIC + IDPF (2242641): + IDPF UNSPECIFIED_NIC_TYPE (67411801): No type specified. VIRTIO_NET (452123481): @@ -70308,6 +70724,7 @@ class NicType(proto.Enum): """ UNDEFINED_NIC_TYPE = 0 GVNIC = 68209305 + IDPF = 2242641 UNSPECIFIED_NIC_TYPE = 67411801 VIRTIO_NET = 452123481 @@ -70326,14 +70743,18 @@ class StackType(proto.Enum): The network interface can have both IPv4 and IPv6 addresses. IPV4_ONLY (22373798): - The network interface will be assigned IPv4 - address. + The network interface will only be assigned + IPv4 addresses. + IPV6_ONLY (79632100): + The network interface will only be assigned + IPv6 addresses. 
UNSPECIFIED_STACK_TYPE (298084569): No description available. """ UNDEFINED_STACK_TYPE = 0 IPV4_IPV6 = 22197249 IPV4_ONLY = 22373798 + IPV6_ONLY = 79632100 UNSPECIFIED_STACK_TYPE = 298084569 access_configs: MutableSequence["AccessConfig"] = proto.RepeatedField( @@ -70849,7 +71270,11 @@ class NetworksGetEffectiveFirewallsResponse(proto.Message): Attributes: firewall_policys (MutableSequence[google.cloud.compute_v1.types.NetworksGetEffectiveFirewallsResponseEffectiveFirewallPolicy]): - Effective firewalls from firewall policy. + [Output Only] Effective firewalls from firewall policy. It + returns Global Network Firewall Policies and Hierarchical + Firewall Policies. Use + regionNetworkFirewallPolicies.getEffectiveFirewalls to get + Regional Network Firewall Policies as well. firewalls (MutableSequence[google.cloud.compute_v1.types.Firewall]): Effective firewalls on the network. """ @@ -70883,8 +71308,13 @@ class NetworksGetEffectiveFirewallsResponseEffectiveFirewallPolicy(proto.Message [Output Only] The name of the firewall policy. This field is a member of `oneof`_ ``_name``. + priority (int): + [Output only] Priority of firewall policy association. Not + applicable for type=HIERARCHY. + + This field is a member of `oneof`_ ``_priority``. rules (MutableSequence[google.cloud.compute_v1.types.FirewallPolicyRule]): - The rules that apply to the network. + [Output Only] The rules that apply to the network. short_name (str): [Output Only] The short name of the firewall policy. @@ -70907,12 +71337,15 @@ class Type(proto.Enum): No description available. NETWORK (413984270): No description available. + SYSTEM (313484847): + No description available. UNSPECIFIED (526786327): No description available. 
""" UNDEFINED_TYPE = 0 HIERARCHY = 69902869 NETWORK = 413984270 + SYSTEM = 313484847 UNSPECIFIED = 526786327 display_name: str = proto.Field( @@ -70925,6 +71358,11 @@ class Type(proto.Enum): number=3373707, optional=True, ) + priority: int = proto.Field( + proto.INT32, + number=445151652, + optional=True, + ) rules: MutableSequence["FirewallPolicyRule"] = proto.RepeatedField( proto.MESSAGE, number=108873975, @@ -73046,8 +73484,7 @@ class Operation(proto.Message): target_link (str): [Output Only] The URL of the resource that the operation modifies. For operations related to creating a snapshot, - this points to the persistent disk that the snapshot was - created from. + this points to the disk that the snapshot was created from. This field is a member of `oneof`_ ``_target_link``. user (str): @@ -77313,6 +77750,42 @@ class PathMatcher(proto.Message): .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields Attributes: + default_custom_error_response_policy (google.cloud.compute_v1.types.CustomErrorResponsePolicy): + defaultCustomErrorResponsePolicy specifies how the Load + Balancer returns error responses when BackendServiceor + BackendBucket responds with an error. This policy takes + effect at the PathMatcher level and applies only when no + policy has been defined for the error code at lower levels + like RouteRule and PathRule within this PathMatcher. If an + error code does not have a policy defined in + defaultCustomErrorResponsePolicy, then a policy defined for + the error code in UrlMap.defaultCustomErrorResponsePolicy + takes effect. For example, consider a UrlMap with the + following configuration: - + UrlMap.defaultCustomErrorResponsePolicy is configured with + policies for 5xx and 4xx errors - A RouteRule for + /coming_soon/ is configured for the error code 404. 
If the + request is for www.myotherdomain.com and a 404 is + encountered, the policy under + UrlMap.defaultCustomErrorResponsePolicy takes effect. If a + 404 response is encountered for the request + www.example.com/current_events/, the pathMatcher's policy + takes effect. If however, the request for + www.example.com/coming_soon/ encounters a 404, the policy in + RouteRule.customErrorResponsePolicy takes effect. If any of + the requests in this example encounter a 500 error code, the + policy at UrlMap.defaultCustomErrorResponsePolicy takes + effect. When used in conjunction with + pathMatcher.defaultRouteAction.retryPolicy, retries take + precedence. Only once all retries are exhausted, the + defaultCustomErrorResponsePolicy is applied. While + attempting a retry, if load balancer is successful in + reaching the service, the defaultCustomErrorResponsePolicy + is ignored and the response from the service is returned to + the client. defaultCustomErrorResponsePolicy is supported + only for global external Application Load Balancers. + + This field is a member of `oneof`_ ``_default_custom_error_response_policy``. default_route_action (google.cloud.compute_v1.types.HttpRouteAction): defaultRouteAction takes effect when none of the pathRules or routeRules match. The load @@ -77410,6 +77883,12 @@ class PathMatcher(proto.Message): pathRules or routeRules. """ + default_custom_error_response_policy: "CustomErrorResponsePolicy" = proto.Field( + proto.MESSAGE, + number=81266089, + optional=True, + message="CustomErrorResponsePolicy", + ) default_route_action: "HttpRouteAction" = proto.Field( proto.MESSAGE, number=378919466, @@ -77464,6 +77943,33 @@ class PathRule(proto.Message): .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields Attributes: + custom_error_response_policy (google.cloud.compute_v1.types.CustomErrorResponsePolicy): + customErrorResponsePolicy specifies how the Load Balancer + returns error responses when BackendServiceor BackendBucket + responds with an error. If a policy for an error code is not + configured for the PathRule, a policy for the error code + configured in pathMatcher.defaultCustomErrorResponsePolicy + is applied. If one is not specified in + pathMatcher.defaultCustomErrorResponsePolicy, the policy + configured in UrlMap.defaultCustomErrorResponsePolicy takes + effect. For example, consider a UrlMap with the following + configuration: - UrlMap.defaultCustomErrorResponsePolicy are + configured with policies for 5xx and 4xx errors - A PathRule + for /coming_soon/ is configured for the error code 404. If + the request is for www.myotherdomain.com and a 404 is + encountered, the policy under + UrlMap.defaultCustomErrorResponsePolicy takes effect. If a + 404 response is encountered for the request + www.example.com/current_events/, the pathMatcher's policy + takes effect. If however, the request for + www.example.com/coming_soon/ encounters a 404, the policy in + PathRule.customErrorResponsePolicy takes effect. If any of + the requests in this example encounter a 500 error code, the + policy at UrlMap.defaultCustomErrorResponsePolicy takes + effect. customErrorResponsePolicy is supported only for + global external Application Load Balancers. + + This field is a member of `oneof`_ ``_custom_error_response_policy``. paths (MutableSequence[str]): The list of path patterns to match. Each must start with / and the only place a \* is allowed is at the end following a @@ -77510,6 +78016,12 @@ class PathRule(proto.Message): This field is a member of `oneof`_ ``_url_redirect``. 
""" + custom_error_response_policy: "CustomErrorResponsePolicy" = proto.Field( + proto.MESSAGE, + number=202816619, + optional=True, + message="CustomErrorResponsePolicy", + ) paths: MutableSequence[str] = proto.RepeatedField( proto.STRING, number=106438894, @@ -77813,9 +78325,6 @@ class Policy(proto.Message): iam_owned (bool): This field is a member of `oneof`_ ``_iam_owned``. - rules (MutableSequence[google.cloud.compute_v1.types.Rule]): - This is deprecated and has no effect. Do not - use. version (int): Specifies the format of the policy. Valid values are ``0``, ``1``, and ``3``. Requests that specify an invalid value are @@ -77861,11 +78370,6 @@ class Policy(proto.Message): number=450566203, optional=True, ) - rules: MutableSequence["Rule"] = proto.RepeatedField( - proto.MESSAGE, - number=108873975, - message="Rule", - ) version: int = proto.Field( proto.INT32, number=351608024, @@ -81924,7 +82428,11 @@ class RegionNetworkFirewallPoliciesGetEffectiveFirewallsResponse(proto.Message): Attributes: firewall_policys (MutableSequence[google.cloud.compute_v1.types.RegionNetworkFirewallPoliciesGetEffectiveFirewallsResponseEffectiveFirewallPolicy]): - Effective firewalls from firewall policy. + [Output only] Effective firewalls from firewall policy. It + applies to Regional Network Firewall Policies in the + specified region, Global Network Firewall Policies and + Hierachial Firewall Policies which are associated with the + network. firewalls (MutableSequence[google.cloud.compute_v1.types.Firewall]): Effective firewalls on the network. """ @@ -81960,7 +82468,7 @@ class RegionNetworkFirewallPoliciesGetEffectiveFirewallsResponseEffectiveFirewal This field is a member of `oneof`_ ``_name``. rules (MutableSequence[google.cloud.compute_v1.types.FirewallPolicyRule]): - The rules that apply to the network. + [Output only] The rules that apply to the network. type_ (str): [Output Only] The type of the firewall policy. 
Can be one of HIERARCHY, NETWORK, NETWORK_REGIONAL, SYSTEM_GLOBAL, @@ -83088,8 +83596,13 @@ class Reservation(proto.Message): This field is a member of `oneof`_ ``_specific_reservation_required``. status (str): - [Output Only] The status of the reservation. Check the - Status enum for the list of possible values. + [Output Only] The status of the reservation. - CREATING: + Reservation resources are being allocated. - READY: + Reservation resources have been allocated, and the + reservation is ready for use. - DELETING: Reservation + deletion is in progress. - UPDATING: Reservation update is + in progress. Check the Status enum for the list of possible + values. This field is a member of `oneof`_ ``_status``. zone (str): @@ -83101,23 +83614,27 @@ class Reservation(proto.Message): """ class Status(proto.Enum): - r"""[Output Only] The status of the reservation. + r"""[Output Only] The status of the reservation. - CREATING: Reservation + resources are being allocated. - READY: Reservation resources have + been allocated, and the reservation is ready for use. - DELETING: + Reservation deletion is in progress. - UPDATING: Reservation update + is in progress. Values: UNDEFINED_STATUS (0): A value indicating that the enum field is not set. CREATING (455564985): - Resources are being allocated for the - reservation. + Reservation resources are being allocated. DELETING (528602024): - Reservation is currently being deleted. + Reservation deletion is in progress. INVALID (530283991): No description available. READY (77848963): - Reservation has allocated all its resources. + Reservation resources have been allocated, + and the reservation is ready for use. UPDATING (494614342): - Reservation is currently being resized. + Reservation update is in progress. """ UNDEFINED_STATUS = 0 CREATING = 455564985 @@ -84914,6 +85431,9 @@ class ResourceStatus(proto.Message): running. This field is a member of `oneof`_ ``_physical_host``. 
+ scheduling (google.cloud.compute_v1.types.ResourceStatusScheduling): + + This field is a member of `oneof`_ ``_scheduling``. upcoming_maintenance (google.cloud.compute_v1.types.UpcomingMaintenance): This field is a member of `oneof`_ ``_upcoming_maintenance``. @@ -84924,6 +85444,12 @@ class ResourceStatus(proto.Message): number=464370704, optional=True, ) + scheduling: "ResourceStatusScheduling" = proto.Field( + proto.MESSAGE, + number=386688404, + optional=True, + message="ResourceStatusScheduling", + ) upcoming_maintenance: "UpcomingMaintenance" = proto.Field( proto.MESSAGE, number=227348592, @@ -84932,6 +85458,29 @@ class ResourceStatus(proto.Message): ) +class ResourceStatusScheduling(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + availability_domain (int): + Specifies the availability domain to place + the instance in. The value must be a number + between 1 and the number of availability domains + specified in the spread placement policy + attached to the instance. + + This field is a member of `oneof`_ ``_availability_domain``. + """ + + availability_domain: int = proto.Field( + proto.INT32, + number=252514344, + optional=True, + ) + + class ResumeInstanceRequest(proto.Message): r"""A request message for Instances.Resume. See the method description for details. @@ -85060,10 +85609,19 @@ class Route(proto.Message): loadBalancingScheme=INTERNAL that should handle matching packets or the IP address of the forwarding Rule. 
For example, the following are - all valid URLs: - 10.128.0.56 - + all valid URLs: - https://www.googleapis.com/compute/v1/projects/project/regions/region /forwardingRules/forwardingRule - - regions/region/forwardingRules/forwardingRule + regions/region/forwardingRules/forwardingRule If + an IP address is provided, must specify an IPv4 + address in dot-decimal notation or an IPv6 + address in RFC 4291 format. For example, the + following are all valid IP addresses: - + 10.128.0.56 - 2001:db8::2d9:51:0:0 - + 2001:db8:0:0:2d9:51:0:0 IPv6 addresses will be + displayed using RFC 5952 compressed format (e.g. + 2001:db8::2d9:51:0:0). Should never be an + IPv4-mapped IPv6 address. This field is a member of `oneof`_ ``_next_hop_ilb``. next_hop_instance (str): @@ -87092,12 +87650,12 @@ class RouterNatRule(proto.Message): egress traffic from a VM is evaluated against. If it evaluates to true, the corresponding ``action`` is enforced. The following examples are valid match expressions for - public NAT: "inIpRange(destination.ip, '1.1.0.0/16') \|\| - inIpRange(destination.ip, '2.2.0.0/16')" "destination.ip == - '1.1.0.1' \|\| destination.ip == '8.8.8.8'" The following - example is a valid match expression for private NAT: - "nexthop.hub == - '//networkconnectivity.googleapis.com/projects/my-project/locations/global/hubs/hub-1'". + public NAT: + ``inIpRange(destination.ip, '1.1.0.0/16') || inIpRange(destination.ip, '2.2.0.0/16')`` + ``destination.ip == '1.1.0.1' || destination.ip == '8.8.8.8'`` + The following example is a valid match expression for + private NAT: + ``nexthop.hub == '//networkconnectivity.googleapis.com/projects/my-project/locations/global/hubs/hub-1'`` This field is a member of `oneof`_ ``_match``. rule_number (int): @@ -87247,9 +87805,22 @@ class RouterStatus(proto.Message): Attributes: best_routes (MutableSequence[google.cloud.compute_v1.types.Route]): - Best routes for this router's network. 
+ A list of the best dynamic routes for this + Cloud Router's Virtual Private Cloud (VPC) + network in the same region as this Cloud Router. + Lists all of the best routes per prefix that are + programmed into this region's VPC data plane. + When global dynamic routing mode is turned on in + the VPC network, this list can include + cross-region dynamic routes from Cloud Routers + in other regions. best_routes_for_router (MutableSequence[google.cloud.compute_v1.types.Route]): - Best routes learned by this router. + A list of the best BGP routes learned by this + Cloud Router. It is possible that routes listed + might not be programmed into the data plane, if + the Google Cloud control plane finds a more + optimal route for a prefix than a route learned + by this Cloud Router. bgp_peer_status (MutableSequence[google.cloud.compute_v1.types.RouterStatusBgpPeerStatus]): nat_status (MutableSequence[google.cloud.compute_v1.types.RouterStatusNatStatus]): @@ -87759,108 +88330,6 @@ class RoutersScopedList(proto.Message): ) -class Rule(proto.Message): - r"""This is deprecated and has no effect. Do not use. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - action (str): - This is deprecated and has no effect. Do not - use. Check the Action enum for the list of - possible values. - - This field is a member of `oneof`_ ``_action``. - conditions (MutableSequence[google.cloud.compute_v1.types.Condition]): - This is deprecated and has no effect. Do not - use. - description (str): - This is deprecated and has no effect. Do not - use. - - This field is a member of `oneof`_ ``_description``. - ins (MutableSequence[str]): - This is deprecated and has no effect. Do not - use. - log_configs (MutableSequence[google.cloud.compute_v1.types.LogConfig]): - This is deprecated and has no effect. Do not - use. - not_ins (MutableSequence[str]): - This is deprecated and has no effect. Do not - use. 
- permissions (MutableSequence[str]): - This is deprecated and has no effect. Do not - use. - """ - - class Action(proto.Enum): - r"""This is deprecated and has no effect. Do not use. - - Values: - UNDEFINED_ACTION (0): - A value indicating that the enum field is not - set. - ALLOW (62368553): - This is deprecated and has no effect. Do not - use. - ALLOW_WITH_LOG (76034177): - This is deprecated and has no effect. Do not - use. - DENY (2094604): - This is deprecated and has no effect. Do not - use. - DENY_WITH_LOG (351433982): - This is deprecated and has no effect. Do not - use. - LOG (75556): - This is deprecated and has no effect. Do not - use. - NO_ACTION (260643444): - This is deprecated and has no effect. Do not - use. - """ - UNDEFINED_ACTION = 0 - ALLOW = 62368553 - ALLOW_WITH_LOG = 76034177 - DENY = 2094604 - DENY_WITH_LOG = 351433982 - LOG = 75556 - NO_ACTION = 260643444 - - action: str = proto.Field( - proto.STRING, - number=187661878, - optional=True, - ) - conditions: MutableSequence["Condition"] = proto.RepeatedField( - proto.MESSAGE, - number=142882488, - message="Condition", - ) - description: str = proto.Field( - proto.STRING, - number=422937596, - optional=True, - ) - ins: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=104430, - ) - log_configs: MutableSequence["LogConfig"] = proto.RepeatedField( - proto.MESSAGE, - number=152873846, - message="LogConfig", - ) - not_ins: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=518443138, - ) - permissions: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=59962500, - ) - - class SSLHealthCheck(proto.Message): r""" @@ -88472,6 +88941,14 @@ class Scheduling(proto.Message): restarted if it is terminated by Compute Engine. This field is a member of `oneof`_ ``_automatic_restart``. + availability_domain (int): + Specifies the availability domain to place + the instance in. 
The value must be a number + between 1 and the number of availability domains + specified in the spread placement policy + attached to the instance. + + This field is a member of `oneof`_ ``_availability_domain``. instance_termination_action (str): Specifies the termination action for the instance. Check the InstanceTerminationAction @@ -88493,6 +88970,13 @@ class Scheduling(proto.Message): API. This field is a member of `oneof`_ ``_location_hint``. + max_run_duration (google.cloud.compute_v1.types.Duration): + Specifies the max run duration for the given + instance. If specified, the instance termination + action will be performed at the end of the run + duration. + + This field is a member of `oneof`_ ``_max_run_duration``. min_node_cpus (int): The minimum number of virtual CPUs this instance will consume when running on a @@ -88514,6 +88998,9 @@ class Scheduling(proto.Message): enum for the list of possible values. This field is a member of `oneof`_ ``_on_host_maintenance``. + on_instance_stop_action (google.cloud.compute_v1.types.SchedulingOnInstanceStopAction): + + This field is a member of `oneof`_ ``_on_instance_stop_action``. preemptible (bool): Defines whether the instance is preemptible. This can only be set during instance creation or while the instance is @@ -88528,6 +89015,13 @@ class Scheduling(proto.Message): the list of possible values. This field is a member of `oneof`_ ``_provisioning_model``. + termination_time (str): + Specifies the timestamp, when the instance + will be terminated, in RFC3339 text format. If + specified, the instance termination action will + be performed at the termination time. + + This field is a member of `oneof`_ ``_termination_time``. 
""" class InstanceTerminationAction(proto.Enum): @@ -88600,6 +89094,11 @@ class ProvisioningModel(proto.Enum): number=350821371, optional=True, ) + availability_domain: int = proto.Field( + proto.INT32, + number=252514344, + optional=True, + ) instance_termination_action: str = proto.Field( proto.STRING, number=107380667, @@ -88616,6 +89115,12 @@ class ProvisioningModel(proto.Enum): number=350519505, optional=True, ) + max_run_duration: "Duration" = proto.Field( + proto.MESSAGE, + number=430839747, + optional=True, + message="Duration", + ) min_node_cpus: int = proto.Field( proto.INT32, number=317231675, @@ -88631,6 +89136,12 @@ class ProvisioningModel(proto.Enum): number=64616796, optional=True, ) + on_instance_stop_action: "SchedulingOnInstanceStopAction" = proto.Field( + proto.MESSAGE, + number=529876681, + optional=True, + message="SchedulingOnInstanceStopAction", + ) preemptible: bool = proto.Field( proto.BOOL, number=324203169, @@ -88641,6 +89152,11 @@ class ProvisioningModel(proto.Enum): number=494423, optional=True, ) + termination_time: str = proto.Field( + proto.STRING, + number=428082984, + optional=True, + ) class SchedulingNodeAffinity(proto.Message): @@ -88705,6 +89221,31 @@ class Operator(proto.Enum): ) +class SchedulingOnInstanceStopAction(proto.Message): + r"""Defines the behaviour for instances with the + instance_termination_action STOP. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + discard_local_ssd (bool): + If true, the contents of any attached Local + SSD disks will be discarded else, the Local SSD + data will be preserved when the instance is + stopped at the end of the run + duration/termination time. + + This field is a member of `oneof`_ ``_discard_local_ssd``. 
+ """ + + discard_local_ssd: bool = proto.Field( + proto.BOOL, + number=319517903, + optional=True, + ) + + class ScratchDisks(proto.Message): r""" @@ -89286,12 +89827,25 @@ class SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfigThresholdConf auto_deploy_load_threshold (float): This field is a member of `oneof`_ ``_auto_deploy_load_threshold``. + detection_absolute_qps (float): + + This field is a member of `oneof`_ ``_detection_absolute_qps``. + detection_load_threshold (float): + + This field is a member of `oneof`_ ``_detection_load_threshold``. + detection_relative_to_baseline_qps (float): + + This field is a member of `oneof`_ ``_detection_relative_to_baseline_qps``. name (str): The name must be 1-63 characters long, and comply with RFC1035. The name must be unique within the security policy. This field is a member of `oneof`_ ``_name``. + traffic_granularity_configs (MutableSequence[google.cloud.compute_v1.types.SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfigThresholdConfigTrafficGranularityConfig]): + Configuration options for enabling Adaptive + Protection to operate on specified granular + traffic units. 
""" auto_deploy_confidence_threshold: float = proto.Field( @@ -89314,11 +89868,98 @@ class SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfigThresholdConf number=522227738, optional=True, ) + detection_absolute_qps: float = proto.Field( + proto.FLOAT, + number=383123174, + optional=True, + ) + detection_load_threshold: float = proto.Field( + proto.FLOAT, + number=430499148, + optional=True, + ) + detection_relative_to_baseline_qps: float = proto.Field( + proto.FLOAT, + number=291343749, + optional=True, + ) name: str = proto.Field( proto.STRING, number=3373707, optional=True, ) + traffic_granularity_configs: MutableSequence[ + "SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfigThresholdConfigTrafficGranularityConfig" + ] = proto.RepeatedField( + proto.MESSAGE, + number=85367358, + message="SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfigThresholdConfigTrafficGranularityConfig", + ) + + +class SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfigThresholdConfigTrafficGranularityConfig( + proto.Message +): + r"""Configurations to specifc granular traffic units processed by + Adaptive Protection. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + enable_each_unique_value (bool): + If enabled, traffic matching each unique value for the + specified type constitutes a separate traffic unit. It can + only be set to true if ``value`` is empty. + + This field is a member of `oneof`_ ``_enable_each_unique_value``. + type_ (str): + Type of this configuration. + Check the Type enum for the list of possible + values. + + This field is a member of `oneof`_ ``_type``. + value (str): + Requests that match this value constitute a + granular traffic unit. + + This field is a member of `oneof`_ ``_value``. + """ + + class Type(proto.Enum): + r"""Type of this configuration. 
+ + Values: + UNDEFINED_TYPE (0): + A value indicating that the enum field is not + set. + HTTP_HEADER_HOST (374321891): + No description available. + HTTP_PATH (311503228): + No description available. + UNSPECIFIED_TYPE (53933922): + No description available. + """ + UNDEFINED_TYPE = 0 + HTTP_HEADER_HOST = 374321891 + HTTP_PATH = 311503228 + UNSPECIFIED_TYPE = 53933922 + + enable_each_unique_value: bool = proto.Field( + proto.BOOL, + number=469206341, + optional=True, + ) + type_: str = proto.Field( + proto.STRING, + number=3575610, + optional=True, + ) + value: str = proto.Field( + proto.STRING, + number=111972721, + optional=True, + ) class SecurityPolicyAdvancedOptionsConfig(proto.Message): @@ -91079,6 +91720,21 @@ class ServiceAttachment(proto.Message): service attachment. This field is a member of `oneof`_ ``_producer_forwarding_rule``. + propagated_connection_limit (int): + The number of consumer spokes that connected Private Service + Connect endpoints can be propagated to through Network + Connectivity Center. This limit lets the service producer + limit how many propagated Private Service Connect + connections can be established to this service attachment + from a single consumer. If the connection preference of the + service attachment is ACCEPT_MANUAL, the limit applies to + each project or network that is listed in the consumer + accept list. If the connection preference of the service + attachment is ACCEPT_AUTOMATIC, the limit applies to each + project that contains a connected endpoint. If unspecified, + the default propagated connection limit is 250. + + This field is a member of `oneof`_ ``_propagated_connection_limit``. psc_service_attachment_id (google.cloud.compute_v1.types.Uint128): [Output Only] An 128-bit global unique ID of the PSC service attachment. 
@@ -91211,6 +91867,11 @@ class ConnectionPreference(proto.Enum): number=247927889, optional=True, ) + propagated_connection_limit: int = proto.Field( + proto.UINT32, + number=332719230, + optional=True, + ) psc_service_attachment_id: "Uint128" = proto.Field( proto.MESSAGE, number=527695214, @@ -91334,6 +91995,12 @@ class ServiceAttachmentConnectedEndpoint(proto.Message): The url of a connected endpoint. This field is a member of `oneof`_ ``_endpoint``. + propagated_connection_count (int): + The number of consumer Network Connectivity + Center spokes that the connected Private Service + Connect endpoint has propagated to. + + This field is a member of `oneof`_ ``_propagated_connection_count``. psc_connection_id (int): The PSC connection id of the connected endpoint. @@ -91393,6 +92060,11 @@ class Status(proto.Enum): number=130489749, optional=True, ) + propagated_connection_count: int = proto.Field( + proto.UINT32, + number=324594130, + optional=True, + ) psc_connection_id: int = proto.Field( proto.UINT64, number=292082397, @@ -99165,7 +99837,9 @@ class StoragePool(proto.Message): This field is a member of `oneof`_ ``_performance_provisioning_type``. pool_provisioned_capacity_gb (int): - Size, in GiB, of the storage pool. + Size, in GiB, of the storage pool. For more + information about the size limits, see + https://cloud.google.com/compute/docs/disks/storage-pools. This field is a member of `oneof`_ ``_pool_provisioned_capacity_gb``. pool_provisioned_iops (int): @@ -99809,9 +100483,10 @@ class StoragePoolResourceStatus(proto.Message): This field is a member of `oneof`_ ``_pool_used_capacity_bytes``. pool_used_iops (int): - Sum of all the disks' provisioned IOPS, minus - some amount that is allowed per disk that is not - counted towards pool's IOPS capacity. + [Output Only] Sum of all the disks' provisioned IOPS, minus + some amount that is allowed per disk that is not counted + towards pool's IOPS capacity. 
For more information, see + https://cloud.google.com/compute/docs/disks/storage-pools. This field is a member of `oneof`_ ``_pool_used_iops``. pool_used_throughput (int): @@ -100331,8 +101006,8 @@ class Subnetwork(proto.Message): This field is a member of `oneof`_ ``_id``. internal_ipv6_prefix (str): - [Output Only] The internal IPv6 address range that is - assigned to this subnetwork. + The internal IPv6 address range that is owned + by this subnetwork. This field is a member of `oneof`_ ``_internal_ipv6_prefix``. ip_cidr_range (str): @@ -100616,12 +101291,16 @@ class StackType(proto.Enum): IPV4_ONLY (22373798): New VMs in this subnet will only be assigned IPv4 addresses. + IPV6_ONLY (79632100): + New VMs in this subnet will only be assigned + IPv6 addresses. UNSPECIFIED_STACK_TYPE (298084569): No description available. """ UNDEFINED_STACK_TYPE = 0 IPV4_IPV6 = 22197249 IPV4_ONLY = 22373798 + IPV6_ONLY = 79632100 UNSPECIFIED_STACK_TYPE = 298084569 class State(proto.Enum): @@ -102256,9 +102935,12 @@ class TargetHttpsProxy(proto.Message): certificate_map (str): URL of a certificate map that identifies a certificate map associated with the given target - proxy. This field can only be set for global - target proxies. If set, sslCertificates will be - ignored. Accepted format is + proxy. This field can only be set for Global + external Application Load Balancer or Classic + Application Load Balancer. For other products + use Certificate Manager Certificates instead. If + set, sslCertificates will be ignored. Accepted + format is //certificatemanager.googleapis.com/projects/{project }/locations/{location}/certificateMaps/{resourceName}. @@ -102376,9 +103058,21 @@ class TargetHttpsProxy(proto.Message): URLs to SslCertificate resources that are used to authenticate connections between users and the load balancer. At least one SSL certificate must be specified. - Currently, you may specify up to 15 SSL certificates. 
- sslCertificates do not apply when the load balancing scheme - is set to INTERNAL_SELF_MANAGED. + SslCertificates do not apply when the load balancing scheme + is set to INTERNAL_SELF_MANAGED. The URLs should refer to a + SSL Certificate resource or Certificate Manager Certificate + resource. Mixing Classic Certificates and Certificate + Manager Certificates is not allowed. Certificate Manager + Certificates must include the certificatemanager API. + Certificate Manager Certificates are not supported by Global + external Application Load Balancer or Classic Application + Load Balancer, use certificate_map instead. Currently, you + may specify up to 15 Classic SSL Certificates. Certificate + Manager Certificates accepted formats are: - + //certificatemanager.googleapis.com/projects/{project}/locations/{ + location}/certificates/{resourceName}. - + https://certificatemanager.googleapis.com/v1alpha1/projects/{project + }/locations/{location}/certificates/{resourceName}. ssl_policy (str): URL of SslPolicy resource that will be associated with the TargetHttpsProxy resource. @@ -102386,6 +103080,35 @@ class TargetHttpsProxy(proto.Message): SSL policy configured. This field is a member of `oneof`_ ``_ssl_policy``. + tls_early_data (str): + Specifies whether TLS 1.3 0-RTT Data ("Early + Data") should be accepted for this service. + Early Data allows a TLS resumption handshake to + include the initial application payload (a HTTP + request) alongside the handshake, reducing the + effective round trips to "zero". This applies to + TLS 1.3 connections over TCP (HTTP/2) as well as + over UDP (QUIC/h3). This can improve application + performance, especially on networks where + interruptions may be common, such as on mobile. + Requests with Early Data will have the + "Early-Data" HTTP header set on the request, + with a value of "1", to allow the backend to + determine whether Early Data was included. 
Note: + TLS Early Data may allow requests to be + replayed, as the data is sent to the backend + before the handshake has fully completed. + Applications that allow idempotent HTTP methods + to make non-idempotent changes, such as a GET + request updating a database, should not accept + Early Data on those requests, and reject + requests with the "Early-Data: 1" HTTP header by + returning a HTTP 425 (Too Early) status code, in + order to remain RFC compliant. The default value + is DISABLED. Check the TlsEarlyData enum for the + list of possible values. + + This field is a member of `oneof`_ ``_tls_early_data``. url_map (str): A fully-qualified or valid partial URL to the UrlMap resource that defines the mapping from @@ -102431,6 +103154,56 @@ class QuicOverride(proto.Enum): ENABLE = 438835587 NONE = 2402104 + class TlsEarlyData(proto.Enum): + r"""Specifies whether TLS 1.3 0-RTT Data ("Early Data") should be + accepted for this service. Early Data allows a TLS resumption + handshake to include the initial application payload (a HTTP + request) alongside the handshake, reducing the effective round + trips to "zero". This applies to TLS 1.3 connections over TCP + (HTTP/2) as well as over UDP (QUIC/h3). This can improve + application performance, especially on networks where + interruptions may be common, such as on mobile. Requests with + Early Data will have the "Early-Data" HTTP header set on the + request, with a value of "1", to allow the backend to determine + whether Early Data was included. Note: TLS Early Data may allow + requests to be replayed, as the data is sent to the backend + before the handshake has fully completed. Applications that + allow idempotent HTTP methods to make non-idempotent changes, + such as a GET request updating a database, should not accept + Early Data on those requests, and reject requests with the + "Early-Data: 1" HTTP header by returning a HTTP 425 (Too Early) + status code, in order to remain RFC compliant. 
The default value + is DISABLED. + + Values: + UNDEFINED_TLS_EARLY_DATA (0): + A value indicating that the enum field is not + set. + DISABLED (516696700): + TLS 1.3 Early Data is not advertised, and any + (invalid) attempts to send Early Data will be + rejected by closing the connection. + PERMISSIVE (504345247): + This enables TLS 1.3 0-RTT, and only allows + Early Data to be included on requests with safe + HTTP methods (GET, HEAD, OPTIONS, TRACE). This + mode does not enforce any other limitations for + requests with Early Data. The application owner + should validate that Early Data is acceptable + for a given request path. + STRICT (308826825): + This enables TLS 1.3 0-RTT, and only allows + Early Data to be included on requests with safe + HTTP methods (GET, HEAD, OPTIONS, TRACE) without + query parameters. Requests that send Early Data + with non-idempotent HTTP methods or with query + parameters will be rejected with a HTTP 425. + """ + UNDEFINED_TLS_EARLY_DATA = 0 + DISABLED = 516696700 + PERMISSIVE = 504345247 + STRICT = 308826825 + authorization_policy: str = proto.Field( proto.STRING, number=33945528, @@ -102510,6 +103283,11 @@ class QuicOverride(proto.Enum): number=295190213, optional=True, ) + tls_early_data: str = proto.Field( + proto.STRING, + number=61108426, + optional=True, + ) url_map: str = proto.Field( proto.STRING, number=367020684, @@ -103188,6 +103966,11 @@ class SessionAffinity(proto.Enum): No session affinity. Connections from the same client IP may go to any instance in the pool. + STRONG_COOKIE_AFFINITY (438628091): + Strong cookie-based affinity. Connections + bearing the same cookie will be served by the + same backend VM while that VM remains healthy, + as long as the cookie has not expired. 
""" UNDEFINED_SESSION_AFFINITY = 0 CLIENT_IP = 345665051 @@ -103198,6 +103981,7 @@ class SessionAffinity(proto.Enum): HEADER_FIELD = 200737960 HTTP_COOKIE = 494981627 NONE = 2402104 + STRONG_COOKIE_AFFINITY = 438628091 backup_pool: str = proto.Field( proto.STRING, @@ -107424,10 +108208,10 @@ class UrlMap(proto.Message): Routing and traffic management table. This resource defines mappings from hostnames and URL paths to either a backend service or a backend bucket. To use the global urlMaps resource, the backend - service must have a loadBalancingScheme of either EXTERNAL or - INTERNAL_SELF_MANAGED. To use the regionUrlMaps resource, the - backend service must have a loadBalancingScheme of INTERNAL_MANAGED. - For more information, read URL Map Concepts. + service must have a loadBalancingScheme of either EXTERNAL, + EXTERNAL_MANAGED, or INTERNAL_SELF_MANAGED. To use the regionUrlMaps + resource, the backend service must have a loadBalancingScheme of + INTERNAL_MANAGED. For more information, read URL Map Concepts. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -107437,6 +108221,37 @@ class UrlMap(proto.Message): [Output Only] Creation timestamp in RFC3339 text format. This field is a member of `oneof`_ ``_creation_timestamp``. + default_custom_error_response_policy (google.cloud.compute_v1.types.CustomErrorResponsePolicy): + defaultCustomErrorResponsePolicy specifies how the Load + Balancer returns error responses when BackendServiceor + BackendBucket responds with an error. This policy takes + effect at the load balancer level and applies only when no + policy has been defined for the error code at lower levels + like PathMatcher, RouteRule and PathRule within this UrlMap. 
+ For example, consider a UrlMap with the following + configuration: - defaultCustomErrorResponsePolicy containing + policies for responding to 5xx and 4xx errors - A + PathMatcher configured for \*.example.com has + defaultCustomErrorResponsePolicy for 4xx. If a request for + http://www.example.com/ encounters a 404, the policy in + pathMatcher.defaultCustomErrorResponsePolicy will be + enforced. When the request for http://www.example.com/ + encounters a 502, the policy in + UrlMap.defaultCustomErrorResponsePolicy will be enforced. + When a request that does not match any host in + \*.example.com such as http://www.myotherexample.com/, + encounters a 404, UrlMap.defaultCustomErrorResponsePolicy + takes effect. When used in conjunction with + defaultRouteAction.retryPolicy, retries take precedence. + Only once all retries are exhausted, the + defaultCustomErrorResponsePolicy is applied. While + attempting a retry, if load balancer is successful in + reaching the service, the defaultCustomErrorResponsePolicy + is ignored and the response from the service is returned to + the client. defaultCustomErrorResponsePolicy is supported + only for global external Application Load Balancers. + + This field is a member of `oneof`_ ``_default_custom_error_response_policy``. default_route_action (google.cloud.compute_v1.types.HttpRouteAction): defaultRouteAction takes effect when none of the hostRules match. The load balancer performs @@ -107465,11 +108280,13 @@ class UrlMap(proto.Message): as URL rewrites, take effect before sending the request to the backend. However, if defaultService is specified, defaultRouteAction - cannot contain any weightedBackendServices. - Conversely, if routeAction specifies any - weightedBackendServices, service must not be - specified. If defaultService is specified, then - set either defaultUrlRedirect , or + cannot contain any + defaultRouteAction.weightedBackendServices. 
+ Conversely, if defaultRouteAction specifies any + defaultRouteAction.weightedBackendServices, + defaultService must not be specified. If + defaultService is specified, then set either + defaultUrlRedirect , or defaultRouteAction.weightedBackendService Don't set both. defaultService has no effect when the URL map is bound to a target gRPC proxy that has @@ -107568,6 +108385,12 @@ class UrlMap(proto.Message): number=30525366, optional=True, ) + default_custom_error_response_policy: "CustomErrorResponsePolicy" = proto.Field( + proto.MESSAGE, + number=81266089, + optional=True, + message="CustomErrorResponsePolicy", + ) default_route_action: "HttpRouteAction" = proto.Field( proto.MESSAGE, number=378919466, @@ -108352,10 +109175,14 @@ class StackType(proto.Enum): IPV4_ONLY (22373798): New VMs in this subnet will only be assigned IPv4 addresses. + IPV6_ONLY (79632100): + New VMs in this subnet will only be assigned + IPv6 addresses. """ UNDEFINED_STACK_TYPE = 0 IPV4_IPV6 = 22197249 IPV4_ONLY = 22373798 + IPV6_ONLY = 79632100 external_ipv6_prefix: str = proto.Field( proto.STRING, @@ -108965,8 +109792,10 @@ class VpnGateway(proto.Message): stack_type (str): The stack type for this VPN gateway to identify the IP protocols that are enabled. Possible values are: IPV4_ONLY, - IPV4_IPV6. If not specified, IPV4_ONLY will be used. Check - the StackType enum for the list of possible values. + IPV4_IPV6, IPV6_ONLY. If not specified, IPV4_ONLY is used if + the gateway IP version is IPV4, or IPV4_IPV6 if the gateway + IP version is IPV6. Check the StackType enum for the list of + possible values. This field is a member of `oneof`_ ``_stack_type``. vpn_interfaces (MutableSequence[google.cloud.compute_v1.types.VpnGatewayVpnGatewayInterface]): @@ -108995,8 +109824,9 @@ class GatewayIpVersion(proto.Enum): class StackType(proto.Enum): r"""The stack type for this VPN gateway to identify the IP protocols - that are enabled. Possible values are: IPV4_ONLY, IPV4_IPV6. 
If not - specified, IPV4_ONLY will be used. + that are enabled. Possible values are: IPV4_ONLY, IPV4_IPV6, + IPV6_ONLY. If not specified, IPV4_ONLY is used if the gateway IP + version is IPV4, or IPV4_IPV6 if the gateway IP version is IPV6. Values: UNDEFINED_STACK_TYPE (0): @@ -109007,10 +109837,13 @@ class StackType(proto.Enum): protocols. IPV4_ONLY (22373798): Enable VPN gateway with only IPv4 protocol. + IPV6_ONLY (79632100): + Enable VPN gateway with only IPv6 protocol. """ UNDEFINED_STACK_TYPE = 0 IPV4_IPV6 = 22197249 IPV4_ONLY = 22373798 + IPV6_ONLY = 79632100 creation_timestamp: str = proto.Field( proto.STRING, @@ -109605,7 +110438,9 @@ class VpnTunnel(proto.Message): establishing the VPN tunnel with the peer VPN gateway. The value should be a CIDR formatted string, for example: 192.168.0.0/16. The ranges - must be disjoint. Only IPv4 is supported. + must be disjoint. Only IPv4 is supported for + Classic VPN tunnels. This field is output only + for HA VPN tunnels. name (str): Name of the resource. Provided by the client when the resource is created. The name must be 1-63 characters long, @@ -109647,7 +110482,8 @@ class VpnTunnel(proto.Message): This field is a member of `oneof`_ ``_peer_gcp_gateway``. peer_ip (str): IP address of the peer VPN gateway. Only IPv4 - is supported. + is supported. This field can be set only for + Classic VPN tunnels. This field is a member of `oneof`_ ``_peer_ip``. region (str): @@ -109662,7 +110498,9 @@ class VpnTunnel(proto.Message): establishing the VPN tunnel with the peer VPN gateway. The value should be a CIDR formatted string, for example: 192.168.0.0/16. The ranges - should be disjoint. Only IPv4 is supported. + should be disjoint. Only IPv4 is supported for + Classic VPN tunnels. This field is output only + for HA VPN tunnels. router (str): URL of the router resource to be used for dynamic routing. 
@@ -109713,7 +110551,8 @@ class VpnTunnel(proto.Message): target_vpn_gateway (str): URL of the Target VPN gateway with which this VPN tunnel is associated. Provided by the client - when the VPN tunnel is created. + when the VPN tunnel is created. This field can + be set only for Classic VPN tunnels. This field is a member of `oneof`_ ``_target_vpn_gateway``. vpn_gateway (str): @@ -110195,7 +111034,8 @@ class WaitGlobalOperationRequest(proto.Message): Attributes: operation (str): - Name of the Operations resource to return. + Name of the Operations resource to return, or + its unique numeric identifier. project (str): Project ID for this request. """ @@ -110216,7 +111056,8 @@ class WaitRegionOperationRequest(proto.Message): Attributes: operation (str): - Name of the Operations resource to return. + Name of the Operations resource to return, or + its unique numeric identifier. project (str): Project ID for this request. region (str): @@ -110243,7 +111084,8 @@ class WaitZoneOperationRequest(proto.Message): Attributes: operation (str): - Name of the Operations resource to return. + Name of the Operations resource to return, or + its unique numeric identifier. project (str): Project ID for this request. zone (str): @@ -110642,8 +111484,11 @@ class WeightedBackendService(proto.Message): a user's request has been directed to a backend service, subsequent requests are sent to the same backend service as determined by the - backend service's session affinity policy. The - value must be from 0 to 1000. + backend service's session affinity policy. Don't + configure session affinity if you're using + weighted traffic splitting. If you do, the + weighted traffic splitting configuration takes + precedence. The value must be from 0 to 1000. This field is a member of `oneof`_ ``_weight``. """ @@ -110896,7 +111741,7 @@ class Type(proto.Enum): class Zone(proto.Message): r"""Represents a Zone resource. A zone is a deployment area. These deployment areas are subsets of a region. 
For example the - zone us-east1-a is located in the us-east1 region. For more + zone us-east1-b is located in the us-east1 region. For more information, read Regions and Zones. diff --git a/packages/google-cloud-compute/samples/generated_samples/snippet_metadata_google.cloud.compute.v1.json b/packages/google-cloud-compute/samples/generated_samples/snippet_metadata_google.cloud.compute.v1.json index 0ad8cd732000..a907a321c94f 100644 --- a/packages/google-cloud-compute/samples/generated_samples/snippet_metadata_google.cloud.compute.v1.json +++ b/packages/google-cloud-compute/samples/generated_samples/snippet_metadata_google.cloud.compute.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-compute", - "version": "1.20.0" + "version": "1.21.0" }, "snippets": [ { diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_accelerator_types.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_accelerator_types.py index ea3620781e44..a554a17f9c63 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_accelerator_types.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_accelerator_types.py @@ -313,85 +313,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (AcceleratorTypesClient, transports.AcceleratorTypesRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. 
- channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_addresses.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_addresses.py index f5239fa822c0..52c35826df5c 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_addresses.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_addresses.py @@ -294,85 +294,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (AddressesClient, transports.AddressesRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. 
- assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. 
- # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_autoscalers.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_autoscalers.py index 334fa041814d..10367f5db405 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_autoscalers.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_autoscalers.py @@ -300,85 +300,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
-@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (AutoscalersClient, transports.AutoscalersRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. 
- client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
- ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_backend_buckets.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_backend_buckets.py index 399fd0ce1c51..83563aedffa0 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_backend_buckets.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_backend_buckets.py @@ -319,85 +319,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (BackendBucketsClient, transports.BackendBucketsRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. 
- channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ @@ -5864,6 +5785,7 @@ def test_insert_rest_call_success(request_type): "kind": "kind_value", "name": "name_value", "self_link": "self_link_value", + "used_by": [{"reference": "reference_value"}], } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency @@ -6259,6 +6181,7 @@ def test_patch_rest_call_success(request_type): "kind": "kind_value", "name": "name_value", "self_link": "self_link_value", + "used_by": [{"reference": "reference_value"}], } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency @@ -6769,42 +6692,6 @@ def test_set_iam_policy_rest_call_success(request_type): "bindings": {}, "etag": "etag_value", "iam_owned": True, - "rules": [ - { - "action": "action_value", - "conditions": [ - { - "iam": "iam_value", - "op": "op_value", - "svc": "svc_value", - "sys": "sys_value", - "values": ["values_value1", "values_value2"], - } - ], - "description": "description_value", - "ins": ["ins_value1", "ins_value2"], - "log_configs": [ - { - "cloud_audit": { - "authorization_logging_options": { - "permission_type": "permission_type_value" - }, - "log_name": "log_name_value", - }, - "counter": { - "custom_fields": [ - {"name": "name_value", "value": "value_value"} - ], - "field": "field_value", - "metric": "metric_value", - }, - "data_access": {"log_mode": "log_mode_value"}, - } - ], - "not_ins": ["not_ins_value1", "not_ins_value2"], - "permissions": ["permissions_value1", "permissions_value2"], - } - ], "version": 774, }, } @@ -7244,6 +7131,7 @@ def test_update_rest_call_success(request_type): "kind": "kind_value", "name": "name_value", "self_link": "self_link_value", + "used_by": [{"reference": "reference_value"}], } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_backend_services.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_backend_services.py index 871813c27d2e..20bef7bb2d30 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_backend_services.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_backend_services.py @@ -319,85 +319,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
-@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (BackendServicesClient, transports.BackendServicesRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. 
- client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
- ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ @@ -6871,6 +6792,7 @@ def test_get_rest_call_success(request_type): fingerprint="fingerprint_value", health_checks=["health_checks_value"], id=205, + ip_address_selection_policy="ip_address_selection_policy_value", kind="kind_value", load_balancing_scheme="load_balancing_scheme_value", locality_lb_policy="locality_lb_policy_value", @@ -6912,6 +6834,7 @@ def test_get_rest_call_success(request_type): assert response.fingerprint == "fingerprint_value" assert response.health_checks == ["health_checks_value"] assert response.id == 205 + assert response.ip_address_selection_policy == "ip_address_selection_policy_value" assert response.kind == "kind_value" assert response.load_balancing_scheme == "load_balancing_scheme_value" assert response.locality_lb_policy == "locality_lb_policy_value" @@ -7447,6 +7370,7 @@ def test_insert_rest_call_success(request_type): "oauth2_client_secret_sha256": "oauth2_client_secret_sha256_value", }, "id": 205, + "ip_address_selection_policy": "ip_address_selection_policy_value", "kind": "kind_value", "load_balancing_scheme": "load_balancing_scheme_value", "locality_lb_policies": [ @@ -7501,6 +7425,11 @@ def test_insert_rest_call_success(request_type): "service_bindings": ["service_bindings_value1", "service_bindings_value2"], "service_lb_policy": "service_lb_policy_value", "session_affinity": "session_affinity_value", + "strong_session_affinity_cookie": { + "name": "name_value", + "path": "path_value", + "ttl": {}, + }, "subsetting": {"policy": "policy_value"}, "timeout_sec": 1185, "used_by": [{"reference": "reference_value"}], @@ -8091,6 +8020,7 @@ def test_patch_rest_call_success(request_type): "oauth2_client_secret_sha256": "oauth2_client_secret_sha256_value", }, "id": 205, + 
"ip_address_selection_policy": "ip_address_selection_policy_value", "kind": "kind_value", "load_balancing_scheme": "load_balancing_scheme_value", "locality_lb_policies": [ @@ -8145,6 +8075,11 @@ def test_patch_rest_call_success(request_type): "service_bindings": ["service_bindings_value1", "service_bindings_value2"], "service_lb_policy": "service_lb_policy_value", "session_affinity": "session_affinity_value", + "strong_session_affinity_cookie": { + "name": "name_value", + "path": "path_value", + "ttl": {}, + }, "subsetting": {"policy": "policy_value"}, "timeout_sec": 1185, "used_by": [{"reference": "reference_value"}], @@ -8658,42 +8593,6 @@ def test_set_iam_policy_rest_call_success(request_type): "bindings": {}, "etag": "etag_value", "iam_owned": True, - "rules": [ - { - "action": "action_value", - "conditions": [ - { - "iam": "iam_value", - "op": "op_value", - "svc": "svc_value", - "sys": "sys_value", - "values": ["values_value1", "values_value2"], - } - ], - "description": "description_value", - "ins": ["ins_value1", "ins_value2"], - "log_configs": [ - { - "cloud_audit": { - "authorization_logging_options": { - "permission_type": "permission_type_value" - }, - "log_name": "log_name_value", - }, - "counter": { - "custom_fields": [ - {"name": "name_value", "value": "value_value"} - ], - "field": "field_value", - "metric": "metric_value", - }, - "data_access": {"log_mode": "log_mode_value"}, - } - ], - "not_ins": ["not_ins_value1", "not_ins_value2"], - "permissions": ["permissions_value1", "permissions_value2"], - } - ], "version": 774, }, } @@ -9437,6 +9336,7 @@ def test_update_rest_call_success(request_type): "oauth2_client_secret_sha256": "oauth2_client_secret_sha256_value", }, "id": 205, + "ip_address_selection_policy": "ip_address_selection_policy_value", "kind": "kind_value", "load_balancing_scheme": "load_balancing_scheme_value", "locality_lb_policies": [ @@ -9491,6 +9391,11 @@ def test_update_rest_call_success(request_type): "service_bindings": 
["service_bindings_value1", "service_bindings_value2"], "service_lb_policy": "service_lb_policy_value", "session_affinity": "session_affinity_value", + "strong_session_affinity_cookie": { + "name": "name_value", + "path": "path_value", + "ttl": {}, + }, "subsetting": {"policy": "policy_value"}, "timeout_sec": 1185, "used_by": [{"reference": "reference_value"}], diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_disk_types.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_disk_types.py index 1a3969d40975..81d46d2759cb 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_disk_types.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_disk_types.py @@ -287,85 +287,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (DiskTypesClient, transports.DiskTypesRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. 
- channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_disks.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_disks.py index 20b3cf9644a7..822ec1ab601b 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_disks.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_disks.py @@ -275,85 +275,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (DisksClient, transports.DisksRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. 
- assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. 
- # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ @@ -3439,7 +3360,7 @@ def test_insert_rest_flattened(): mock_args = dict( project="project_value", zone="zone_value", - disk_resource=compute.Disk(architecture="architecture_value"), + disk_resource=compute.Disk(access_mode="access_mode_value"), ) mock_args.update(sample_request) @@ -3478,7 +3399,7 @@ def test_insert_rest_flattened_error(transport: str = "rest"): compute.InsertDiskRequest(), project="project_value", zone="zone_value", - disk_resource=compute.Disk(architecture="architecture_value"), + disk_resource=compute.Disk(access_mode="access_mode_value"), ) @@ -3647,7 +3568,7 @@ def test_insert_unary_rest_flattened(): mock_args = dict( project="project_value", zone="zone_value", - disk_resource=compute.Disk(architecture="architecture_value"), + disk_resource=compute.Disk(access_mode="access_mode_value"), ) mock_args.update(sample_request) @@ -3686,7 +3607,7 @@ def test_insert_unary_rest_flattened_error(transport: 
str = "rest"): compute.InsertDiskRequest(), project="project_value", zone="zone_value", - disk_resource=compute.Disk(architecture="architecture_value"), + disk_resource=compute.Disk(access_mode="access_mode_value"), ) @@ -7090,7 +7011,7 @@ def test_update_rest_flattened(): project="project_value", zone="zone_value", disk="disk_value", - disk_resource=compute.Disk(architecture="architecture_value"), + disk_resource=compute.Disk(access_mode="access_mode_value"), ) mock_args.update(sample_request) @@ -7130,7 +7051,7 @@ def test_update_rest_flattened_error(transport: str = "rest"): project="project_value", zone="zone_value", disk="disk_value", - disk_resource=compute.Disk(architecture="architecture_value"), + disk_resource=compute.Disk(access_mode="access_mode_value"), ) @@ -7307,7 +7228,7 @@ def test_update_unary_rest_flattened(): project="project_value", zone="zone_value", disk="disk_value", - disk_resource=compute.Disk(architecture="architecture_value"), + disk_resource=compute.Disk(access_mode="access_mode_value"), ) mock_args.update(sample_request) @@ -7347,7 +7268,7 @@ def test_update_unary_rest_flattened_error(transport: str = "rest"): project="project_value", zone="zone_value", disk="disk_value", - disk_resource=compute.Disk(architecture="architecture_value"), + disk_resource=compute.Disk(access_mode="access_mode_value"), ) @@ -8502,6 +8423,7 @@ def test_get_rest_call_success(request_type): with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = compute.Disk( + access_mode="access_mode_value", architecture="architecture_value", creation_timestamp="creation_timestamp_value", description="description_value", @@ -8557,6 +8479,7 @@ def test_get_rest_call_success(request_type): # Establish that the response is the type that we expect. 
assert isinstance(response, compute.Disk) + assert response.access_mode == "access_mode_value" assert response.architecture == "architecture_value" assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" @@ -8813,6 +8736,7 @@ def test_insert_rest_call_success(request_type): # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2"} request_init["disk_resource"] = { + "access_mode": "access_mode_value", "architecture": "architecture_value", "async_primary_disk": { "consistency_group_policy": "consistency_group_policy_value", @@ -9729,42 +9653,6 @@ def test_set_iam_policy_rest_call_success(request_type): "bindings": {}, "etag": "etag_value", "iam_owned": True, - "rules": [ - { - "action": "action_value", - "conditions": [ - { - "iam": "iam_value", - "op": "op_value", - "svc": "svc_value", - "sys": "sys_value", - "values": ["values_value1", "values_value2"], - } - ], - "description": "description_value", - "ins": ["ins_value1", "ins_value2"], - "log_configs": [ - { - "cloud_audit": { - "authorization_logging_options": { - "permission_type": "permission_type_value" - }, - "log_name": "log_name_value", - }, - "counter": { - "custom_fields": [ - {"name": "name_value", "value": "value_value"} - ], - "field": "field_value", - "metric": "metric_value", - }, - "data_access": {"log_mode": "log_mode_value"}, - } - ], - "not_ins": ["not_ins_value1", "not_ins_value2"], - "permissions": ["permissions_value1", "permissions_value2"], - } - ], "version": 774, }, } @@ -11041,6 +10929,7 @@ def test_update_rest_call_success(request_type): # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2", "disk": "sample3"} request_init["disk_resource"] = { + "access_mode": "access_mode_value", "architecture": "architecture_value", "async_primary_disk": { "consistency_group_policy": "consistency_group_policy_value", diff --git 
a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_external_vpn_gateways.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_external_vpn_gateways.py index aa3fc2c28cae..71d639a5ff35 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_external_vpn_gateways.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_external_vpn_gateways.py @@ -334,89 +334,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - ( - ExternalVpnGatewaysClient, - transports.ExternalVpnGatewaysRestTransport, - "rest", - ), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. 
- channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_firewall_policies.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_firewall_policies.py index c44613ae2896..6753777e9064 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_firewall_policies.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_firewall_policies.py @@ -320,85 +320,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (FirewallPoliciesClient, transports.FirewallPoliciesRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. 
- assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. 
- # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ @@ -9462,42 +9383,6 @@ def test_set_iam_policy_rest_call_success(request_type): "bindings": {}, "etag": "etag_value", "iam_owned": True, - "rules": [ - { - "action": "action_value", - "conditions": [ - { - "iam": "iam_value", - "op": "op_value", - "svc": "svc_value", - "sys": "sys_value", - "values": ["values_value1", "values_value2"], - } - ], - "description": "description_value", - "ins": ["ins_value1", "ins_value2"], - "log_configs": [ - { - "cloud_audit": { - "authorization_logging_options": { - "permission_type": "permission_type_value" - }, - "log_name": "log_name_value", - }, - "counter": { - "custom_fields": [ - {"name": "name_value", "value": "value_value"} - ], - "field": "field_value", - "metric": "metric_value", - }, - "data_access": {"log_mode": "log_mode_value"}, - } - ], - "not_ins": ["not_ins_value1", "not_ins_value2"], - "permissions": ["permissions_value1", "permissions_value2"], - } - ], "version": 
774, }, } diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_firewalls.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_firewalls.py index 83b4a878122e..e60e8f020c86 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_firewalls.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_firewalls.py @@ -294,85 +294,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (FirewallsClient, transports.FirewallsRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. 
- channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_forwarding_rules.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_forwarding_rules.py index 008d2ef983b9..ae8980ee4be6 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_forwarding_rules.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_forwarding_rules.py @@ -319,85 +319,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (ForwardingRulesClient, transports.ForwardingRulesRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. 
- assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. 
- # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_global_addresses.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_global_addresses.py index 774d4d4b0b20..a5e189fb604f 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_global_addresses.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_global_addresses.py @@ -319,85 +319,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
-@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (GlobalAddressesClient, transports.GlobalAddressesRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. 
- client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
- ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_global_forwarding_rules.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_global_forwarding_rules.py index ab27a72b4460..8025917c13e2 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_global_forwarding_rules.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_global_forwarding_rules.py @@ -336,89 +336,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - ( - GlobalForwardingRulesClient, - transports.GlobalForwardingRulesRestTransport, - "rest", - ), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. 
- channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_global_network_endpoint_groups.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_global_network_endpoint_groups.py index 51cde4807c37..0eff422926de 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_global_network_endpoint_groups.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_global_network_endpoint_groups.py @@ -344,89 +344,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
-@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - ( - GlobalNetworkEndpointGroupsClient, - transports.GlobalNetworkEndpointGroupsRestTransport, - "rest", - ), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. 
- client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
- ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ @@ -3538,9 +3455,11 @@ def test_attach_network_endpoints_rest_call_success(request_type): "network_endpoints": [ { "annotations": {}, + "client_destination_port": 2468, "fqdn": "fqdn_value", "instance": "instance_value", "ip_address": "ip_address_value", + "ipv6_address": "ipv6_address_value", "port": 453, } ] @@ -3957,9 +3876,11 @@ def test_detach_network_endpoints_rest_call_success(request_type): "network_endpoints": [ { "annotations": {}, + "client_destination_port": 2468, "fqdn": "fqdn_value", "instance": "instance_value", "ip_address": "ip_address_value", + "ipv6_address": "ipv6_address_value", "port": 453, } ] @@ -4381,6 +4302,7 @@ def test_insert_rest_call_success(request_type): "network_endpoint_type": "network_endpoint_type_value", "psc_data": { "consumer_psc_address": "consumer_psc_address_value", + "producer_port": 1416, "psc_connection_id": 1793, "psc_connection_status": "psc_connection_status_value", }, diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_global_operations.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_global_operations.py index 6cbff281c1a9..6ac627a5ca57 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_global_operations.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_global_operations.py @@ -313,85 +313,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
-@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (GlobalOperationsClient, transports.GlobalOperationsRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. 
- client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
- ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_global_organization_operations.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_global_organization_operations.py index 675eb3067bb3..9b9ac6b6c5cc 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_global_organization_operations.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_global_organization_operations.py @@ -339,89 +339,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - ( - GlobalOrganizationOperationsClient, - transports.GlobalOrganizationOperationsRestTransport, - "rest", - ), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. 
- channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_global_public_delegated_prefixes.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_global_public_delegated_prefixes.py index 71523c1c2026..409352834f3f 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_global_public_delegated_prefixes.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_global_public_delegated_prefixes.py @@ -348,89 +348,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
-@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - ( - GlobalPublicDelegatedPrefixesClient, - transports.GlobalPublicDelegatedPrefixesRestTransport, - "rest", - ), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. 
- client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
- ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_health_checks.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_health_checks.py index e0ccf14dd212..8a3017eab1b1 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_health_checks.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_health_checks.py @@ -304,85 +304,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (HealthChecksClient, transports.HealthChecksRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. 
- channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ @@ -3673,6 +3594,7 @@ def test_get_rest_call_success(request_type): name="name_value", region="region_value", self_link="self_link_value", + source_regions=["source_regions_value"], timeout_sec=1185, type_="type__value", unhealthy_threshold=2046, @@ -3700,6 +3622,7 @@ def test_get_rest_call_success(request_type): assert response.name == "name_value" assert response.region == "region_value" assert response.self_link == "self_link_value" + assert response.source_regions == ["source_regions_value"] assert response.timeout_sec == 1185 assert response.type_ == "type__value" assert response.unhealthy_threshold == 2046 @@ -3839,6 +3762,7 @@ def test_insert_rest_call_success(request_type): "name": "name_value", "region": "region_value", "self_link": "self_link_value", + "source_regions": ["source_regions_value1", "source_regions_value2"], "ssl_health_check": { "port": 453, "port_name": "port_name_value", @@ -4254,6 +4178,7 @@ def test_patch_rest_call_success(request_type): "name": "name_value", "region": "region_value", "self_link": "self_link_value", + 
"source_regions": ["source_regions_value1", "source_regions_value2"], "ssl_health_check": { "port": 453, "port_name": "port_name_value", @@ -4546,6 +4471,7 @@ def test_update_rest_call_success(request_type): "name": "name_value", "region": "region_value", "self_link": "self_link_value", + "source_regions": ["source_regions_value1", "source_regions_value2"], "ssl_health_check": { "port": 453, "port_name": "port_name_value", diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_image_family_views.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_image_family_views.py index 83669713c4a6..7bae0a5195af 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_image_family_views.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_image_family_views.py @@ -312,85 +312,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (ImageFamilyViewsClient, transports.ImageFamilyViewsRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. 
Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_images.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_images.py index e4a9f1468951..e4ce780bf79d 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_images.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_images.py @@ -279,85 +279,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (ImagesClient, transports.ImagesRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. 
- assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. 
- # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ @@ -5820,42 +5741,6 @@ def test_set_iam_policy_rest_call_success(request_type): "bindings": {}, "etag": "etag_value", "iam_owned": True, - "rules": [ - { - "action": "action_value", - "conditions": [ - { - "iam": "iam_value", - "op": "op_value", - "svc": "svc_value", - "sys": "sys_value", - "values": ["values_value1", "values_value2"], - } - ], - "description": "description_value", - "ins": ["ins_value1", "ins_value2"], - "log_configs": [ - { - "cloud_audit": { - "authorization_logging_options": { - "permission_type": "permission_type_value" - }, - "log_name": "log_name_value", - }, - "counter": { - "custom_fields": [ - {"name": "name_value", "value": "value_value"} - ], - "field": "field_value", - "metric": "metric_value", - }, - "data_access": {"log_mode": "log_mode_value"}, - } - ], - "not_ins": ["not_ins_value1", "not_ins_value2"], - "permissions": ["permissions_value1", "permissions_value2"], - } - ], "version": 
774, }, } diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instance_group_manager_resize_requests.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instance_group_manager_resize_requests.py index 278cdfa2337c..98ed345ea93d 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instance_group_manager_resize_requests.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instance_group_manager_resize_requests.py @@ -354,89 +354,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - ( - InstanceGroupManagerResizeRequestsClient, - transports.InstanceGroupManagerResizeRequestsRestTransport, - "rest", - ), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. 
- channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instance_group_managers.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instance_group_managers.py index 5f055d896709..72303de5c880 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instance_group_managers.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instance_group_managers.py @@ -336,89 +336,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - ( - InstanceGroupManagersClient, - transports.InstanceGroupManagersRestTransport, - "rest", - ), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. 
- assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. 
- # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ @@ -10520,6 +10437,8 @@ def test_get_rest_call_success(request_type): list_managed_instances_results="list_managed_instances_results_value", name="name_value", region="region_value", + satisfies_pzi=True, + satisfies_pzs=True, self_link="self_link_value", target_pools=["target_pools_value"], target_size=1185, @@ -10553,6 +10472,8 @@ def test_get_rest_call_success(request_type): ) assert response.name == "name_value" assert response.region == "region_value" + assert response.satisfies_pzi is True + assert response.satisfies_pzs is True assert response.self_link == "self_link_value" assert response.target_pools == ["target_pools_value"] assert response.target_size == 1185 @@ -10684,6 +10605,7 @@ def test_insert_rest_call_success(request_type): }, "fingerprint": "fingerprint_value", "id": 205, + "instance_flexibility_policy": {"instance_selections": {}}, "instance_group": "instance_group_value", "instance_lifecycle_policy": 
{ "default_action_on_failure": "default_action_on_failure_value", @@ -10695,6 +10617,8 @@ def test_insert_rest_call_success(request_type): "name": "name_value", "named_ports": [{"name": "name_value", "port": 453}], "region": "region_value", + "satisfies_pzi": True, + "satisfies_pzs": True, "self_link": "self_link_value", "stateful_policy": { "preserved_state": {"disks": {}, "external_i_ps": {}, "internal_i_ps": {}} @@ -11522,6 +11446,7 @@ def test_patch_rest_call_success(request_type): }, "fingerprint": "fingerprint_value", "id": 205, + "instance_flexibility_policy": {"instance_selections": {}}, "instance_group": "instance_group_value", "instance_lifecycle_policy": { "default_action_on_failure": "default_action_on_failure_value", @@ -11533,6 +11458,8 @@ def test_patch_rest_call_success(request_type): "name": "name_value", "named_ports": [{"name": "name_value", "port": 453}], "region": "region_value", + "satisfies_pzi": True, + "satisfies_pzs": True, "self_link": "self_link_value", "stateful_policy": { "preserved_state": {"disks": {}, "external_i_ps": {}, "internal_i_ps": {}} diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instance_groups.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instance_groups.py index 5eaad7a68782..95e87f3db9cd 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instance_groups.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instance_groups.py @@ -319,85 +319,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
-@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (InstanceGroupsClient, transports.InstanceGroupsRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. 
- client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
- ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instance_settings_service.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instance_settings_service.py index 056921fff75b..9801d874beae 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instance_settings_service.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instance_settings_service.py @@ -335,89 +335,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - ( - InstanceSettingsServiceClient, - transports.InstanceSettingsServiceRestTransport, - "rest", - ), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. 
- channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instance_templates.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instance_templates.py index a6caf3a470b2..c535e17bf157 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instance_templates.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instance_templates.py @@ -328,85 +328,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (InstanceTemplatesClient, transports.InstanceTemplatesRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. 
- assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. 
- # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ @@ -3775,11 +3696,16 @@ def test_insert_rest_call_success(request_type): "advanced_machine_features": { "enable_nested_virtualization": True, "enable_uefi_networking": True, + "performance_monitoring_unit": "performance_monitoring_unit_value", "threads_per_core": 1689, + "turbo_mode": "turbo_mode_value", "visible_core_count": 1918, }, "can_ip_forward": True, - "confidential_instance_config": {"enable_confidential_compute": True}, + "confidential_instance_config": { + "confidential_instance_type": "confidential_instance_type_value", + "enable_confidential_compute": True, + }, "description": "description_value", "disks": [ { @@ -3911,9 +3837,11 @@ def test_insert_rest_call_success(request_type): ], "scheduling": { "automatic_restart": True, + "availability_domain": 2002, "instance_termination_action": "instance_termination_action_value", "local_ssd_recovery_timeout": {"nanos": 543, "seconds": 751}, "location_hint": 
"location_hint_value", + "max_run_duration": {}, "min_node_cpus": 1379, "node_affinities": [ { @@ -3923,8 +3851,10 @@ def test_insert_rest_call_success(request_type): } ], "on_host_maintenance": "on_host_maintenance_value", + "on_instance_stop_action": {"discard_local_ssd": True}, "preemptible": True, "provisioning_model": "provisioning_model_value", + "termination_time": "termination_time_value", }, "service_accounts": [ {"email": "email_value", "scopes": ["scopes_value1", "scopes_value2"]} @@ -4350,42 +4280,6 @@ def test_set_iam_policy_rest_call_success(request_type): "bindings": {}, "etag": "etag_value", "iam_owned": True, - "rules": [ - { - "action": "action_value", - "conditions": [ - { - "iam": "iam_value", - "op": "op_value", - "svc": "svc_value", - "sys": "sys_value", - "values": ["values_value1", "values_value2"], - } - ], - "description": "description_value", - "ins": ["ins_value1", "ins_value2"], - "log_configs": [ - { - "cloud_audit": { - "authorization_logging_options": { - "permission_type": "permission_type_value" - }, - "log_name": "log_name_value", - }, - "counter": { - "custom_fields": [ - {"name": "name_value", "value": "value_value"} - ], - "field": "field_value", - "metric": "metric_value", - }, - "data_access": {"log_mode": "log_mode_value"}, - } - ], - "not_ins": ["not_ins_value1", "not_ins_value2"], - "permissions": ["permissions_value1", "permissions_value2"], - } - ], "version": 774, }, } diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instances.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instances.py index 7ec0982e002b..4739891329c4 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instances.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instances.py @@ -294,85 +294,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
-@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (InstancesClient, transports.InstancesRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. 
- client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
- ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ @@ -20384,11 +20305,16 @@ def test_bulk_insert_rest_call_success(request_type): "advanced_machine_features": { "enable_nested_virtualization": True, "enable_uefi_networking": True, + "performance_monitoring_unit": "performance_monitoring_unit_value", "threads_per_core": 1689, + "turbo_mode": "turbo_mode_value", "visible_core_count": 1918, }, "can_ip_forward": True, - "confidential_instance_config": {"enable_confidential_compute": True}, + "confidential_instance_config": { + "confidential_instance_type": "confidential_instance_type_value", + "enable_confidential_compute": True, + }, "description": "description_value", "disks": [ { @@ -20520,9 +20446,11 @@ def test_bulk_insert_rest_call_success(request_type): ], "scheduling": { "automatic_restart": True, + "availability_domain": 2002, "instance_termination_action": "instance_termination_action_value", "local_ssd_recovery_timeout": {"nanos": 543, "seconds": 751}, "location_hint": "location_hint_value", + "max_run_duration": {}, "min_node_cpus": 1379, "node_affinities": [ { @@ -20532,8 +20460,10 @@ def test_bulk_insert_rest_call_success(request_type): } ], "on_host_maintenance": "on_host_maintenance_value", + "on_instance_stop_action": {"discard_local_ssd": True}, "preemptible": True, "provisioning_model": "provisioning_model_value", + "termination_time": "termination_time_value", }, "service_accounts": [ {"email": "email_value", "scopes": ["scopes_value1", "scopes_value2"]} @@ -22154,11 +22084,16 @@ def test_insert_rest_call_success(request_type): "advanced_machine_features": { "enable_nested_virtualization": True, "enable_uefi_networking": True, + "performance_monitoring_unit": "performance_monitoring_unit_value", "threads_per_core": 1689, + "turbo_mode": 
"turbo_mode_value", "visible_core_count": 1918, }, "can_ip_forward": True, - "confidential_instance_config": {"enable_confidential_compute": True}, + "confidential_instance_config": { + "confidential_instance_type": "confidential_instance_type_value", + "enable_confidential_compute": True, + }, "cpu_platform": "cpu_platform_value", "creation_timestamp": "creation_timestamp_value", "deletion_protection": True, @@ -22295,6 +22230,7 @@ def test_insert_rest_call_success(request_type): "resource_policies": ["resource_policies_value1", "resource_policies_value2"], "resource_status": { "physical_host": "physical_host_value", + "scheduling": {"availability_domain": 2002}, "upcoming_maintenance": { "can_reschedule": True, "latest_window_start_time": "latest_window_start_time_value", @@ -22308,9 +22244,11 @@ def test_insert_rest_call_success(request_type): "satisfies_pzs": True, "scheduling": { "automatic_restart": True, + "availability_domain": 2002, "instance_termination_action": "instance_termination_action_value", "local_ssd_recovery_timeout": {"nanos": 543, "seconds": 751}, "location_hint": "location_hint_value", + "max_run_duration": {}, "min_node_cpus": 1379, "node_affinities": [ { @@ -22320,8 +22258,10 @@ def test_insert_rest_call_success(request_type): } ], "on_host_maintenance": "on_host_maintenance_value", + "on_instance_stop_action": {"discard_local_ssd": True}, "preemptible": True, "provisioning_model": "provisioning_model_value", + "termination_time": "termination_time_value", }, "self_link": "self_link_value", "service_accounts": [ @@ -23997,42 +23937,6 @@ def test_set_iam_policy_rest_call_success(request_type): "bindings": {}, "etag": "etag_value", "iam_owned": True, - "rules": [ - { - "action": "action_value", - "conditions": [ - { - "iam": "iam_value", - "op": "op_value", - "svc": "svc_value", - "sys": "sys_value", - "values": ["values_value1", "values_value2"], - } - ], - "description": "description_value", - "ins": ["ins_value1", "ins_value2"], - 
"log_configs": [ - { - "cloud_audit": { - "authorization_logging_options": { - "permission_type": "permission_type_value" - }, - "log_name": "log_name_value", - }, - "counter": { - "custom_fields": [ - {"name": "name_value", "value": "value_value"} - ], - "field": "field_value", - "metric": "metric_value", - }, - "data_access": {"log_mode": "log_mode_value"}, - } - ], - "not_ins": ["not_ins_value1", "not_ins_value2"], - "permissions": ["permissions_value1", "permissions_value2"], - } - ], "version": 774, }, } @@ -25669,9 +25573,11 @@ def test_set_scheduling_rest_call_success(request_type): request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} request_init["scheduling_resource"] = { "automatic_restart": True, + "availability_domain": 2002, "instance_termination_action": "instance_termination_action_value", "local_ssd_recovery_timeout": {"nanos": 543, "seconds": 751}, "location_hint": "location_hint_value", + "max_run_duration": {}, "min_node_cpus": 1379, "node_affinities": [ { @@ -25681,8 +25587,10 @@ def test_set_scheduling_rest_call_success(request_type): } ], "on_host_maintenance": "on_host_maintenance_value", + "on_instance_stop_action": {"discard_local_ssd": True}, "preemptible": True, "provisioning_model": "provisioning_model_value", + "termination_time": "termination_time_value", } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency @@ -27953,11 +27861,16 @@ def test_update_rest_call_success(request_type): "advanced_machine_features": { "enable_nested_virtualization": True, "enable_uefi_networking": True, + "performance_monitoring_unit": "performance_monitoring_unit_value", "threads_per_core": 1689, + "turbo_mode": "turbo_mode_value", "visible_core_count": 1918, }, "can_ip_forward": True, - "confidential_instance_config": {"enable_confidential_compute": True}, + "confidential_instance_config": { + "confidential_instance_type": "confidential_instance_type_value", + "enable_confidential_compute": True, + }, "cpu_platform": "cpu_platform_value", "creation_timestamp": "creation_timestamp_value", "deletion_protection": True, @@ -28094,6 +28007,7 @@ def test_update_rest_call_success(request_type): "resource_policies": ["resource_policies_value1", "resource_policies_value2"], "resource_status": { "physical_host": "physical_host_value", + "scheduling": {"availability_domain": 2002}, "upcoming_maintenance": { "can_reschedule": True, "latest_window_start_time": "latest_window_start_time_value", @@ -28107,9 +28021,11 @@ def test_update_rest_call_success(request_type): "satisfies_pzs": True, "scheduling": { "automatic_restart": True, + "availability_domain": 2002, "instance_termination_action": "instance_termination_action_value", "local_ssd_recovery_timeout": {"nanos": 543, "seconds": 751}, "location_hint": "location_hint_value", + "max_run_duration": {}, "min_node_cpus": 1379, "node_affinities": [ { @@ -28119,8 +28035,10 @@ def test_update_rest_call_success(request_type): } ], "on_host_maintenance": "on_host_maintenance_value", + "on_instance_stop_action": {"discard_local_ssd": True}, "preemptible": True, "provisioning_model": "provisioning_model_value", + "termination_time": "termination_time_value", }, "self_link": "self_link_value", "service_accounts": [ diff --git 
a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instant_snapshots.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instant_snapshots.py index c796e79a7827..039e53bc7aed 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instant_snapshots.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instant_snapshots.py @@ -320,85 +320,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (InstantSnapshotsClient, transports.InstantSnapshotsRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. 
- channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ @@ -4728,42 +4649,6 @@ def test_set_iam_policy_rest_call_success(request_type): "bindings": {}, "etag": "etag_value", "iam_owned": True, - "rules": [ - { - "action": "action_value", - "conditions": [ - { - "iam": "iam_value", - "op": "op_value", - "svc": "svc_value", - "sys": "sys_value", - "values": ["values_value1", "values_value2"], - } - ], - "description": "description_value", - "ins": ["ins_value1", "ins_value2"], - "log_configs": [ - { - "cloud_audit": { - "authorization_logging_options": { - "permission_type": "permission_type_value" - }, - "log_name": "log_name_value", - }, - "counter": { - "custom_fields": [ - {"name": "name_value", "value": "value_value"} - ], - "field": "field_value", - "metric": "metric_value", - }, - "data_access": {"log_mode": "log_mode_value"}, - } - ], - "not_ins": ["not_ins_value1", "not_ins_value2"], - "permissions": ["permissions_value1", "permissions_value2"], - } - ], "version": 774, }, } diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_interconnect_attachments.py 
b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_interconnect_attachments.py index 6fc7855b0497..0fea67a09c43 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_interconnect_attachments.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_interconnect_attachments.py @@ -336,89 +336,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - ( - InterconnectAttachmentsClient, - transports.InterconnectAttachmentsRestTransport, - "rest", - ), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. 
- channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_interconnect_locations.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_interconnect_locations.py index dbaac9fb441b..e3d47b09cf5c 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_interconnect_locations.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_interconnect_locations.py @@ -329,89 +329,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - ( - InterconnectLocationsClient, - transports.InterconnectLocationsRestTransport, - "rest", - ), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. 
- assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. 
- # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_interconnect_remote_locations.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_interconnect_remote_locations.py index f0dd7a730818..cd8e2d9c5c72 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_interconnect_remote_locations.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_interconnect_remote_locations.py @@ -337,89 +337,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
-@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - ( - InterconnectRemoteLocationsClient, - transports.InterconnectRemoteLocationsRestTransport, - "rest", - ), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. 
- client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
- ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_interconnects.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_interconnects.py index 319d55f7cd55..51359bb6cb71 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_interconnects.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_interconnects.py @@ -315,85 +315,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (InterconnectsClient, transports.InterconnectsRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. 
- channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_license_codes.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_license_codes.py index ec2ab714269c..334143e0fd3a 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_license_codes.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_license_codes.py @@ -296,85 +296,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (LicenseCodesClient, transports.LicenseCodesRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. 
- assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. 
- # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_licenses.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_licenses.py index 18e5e00c0d5c..c4cdcdb95e12 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_licenses.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_licenses.py @@ -287,85 +287,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
-@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (LicensesClient, transports.LicensesRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. 
- client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
- ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ @@ -3710,42 +3631,6 @@ def test_set_iam_policy_rest_call_success(request_type): "bindings": {}, "etag": "etag_value", "iam_owned": True, - "rules": [ - { - "action": "action_value", - "conditions": [ - { - "iam": "iam_value", - "op": "op_value", - "svc": "svc_value", - "sys": "sys_value", - "values": ["values_value1", "values_value2"], - } - ], - "description": "description_value", - "ins": ["ins_value1", "ins_value2"], - "log_configs": [ - { - "cloud_audit": { - "authorization_logging_options": { - "permission_type": "permission_type_value" - }, - "log_name": "log_name_value", - }, - "counter": { - "custom_fields": [ - {"name": "name_value", "value": "value_value"} - ], - "field": "field_value", - "metric": "metric_value", - }, - "data_access": {"log_mode": "log_mode_value"}, - } - ], - "not_ins": ["not_ins_value1", "not_ins_value2"], - "permissions": ["permissions_value1", "permissions_value2"], - } - ], "version": 774, }, } diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_machine_images.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_machine_images.py index 9849725306b5..57836c3a368a 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_machine_images.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_machine_images.py @@ -315,85 +315,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
-@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (MachineImagesClient, transports.MachineImagesRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. 
- client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
- ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ @@ -3363,11 +3284,16 @@ def test_insert_rest_call_success(request_type): "advanced_machine_features": { "enable_nested_virtualization": True, "enable_uefi_networking": True, + "performance_monitoring_unit": "performance_monitoring_unit_value", "threads_per_core": 1689, + "turbo_mode": "turbo_mode_value", "visible_core_count": 1918, }, "can_ip_forward": True, - "confidential_instance_config": {"enable_confidential_compute": True}, + "confidential_instance_config": { + "confidential_instance_type": "confidential_instance_type_value", + "enable_confidential_compute": True, + }, "description": "description_value", "disks": [ { @@ -3499,9 +3425,11 @@ def test_insert_rest_call_success(request_type): ], "scheduling": { "automatic_restart": True, + "availability_domain": 2002, "instance_termination_action": "instance_termination_action_value", "local_ssd_recovery_timeout": {"nanos": 543, "seconds": 751}, "location_hint": "location_hint_value", + "max_run_duration": {}, "min_node_cpus": 1379, "node_affinities": [ { @@ -3511,8 +3439,10 @@ def test_insert_rest_call_success(request_type): } ], "on_host_maintenance": "on_host_maintenance_value", + "on_instance_stop_action": {"discard_local_ssd": True}, "preemptible": True, "provisioning_model": "provisioning_model_value", + "termination_time": "termination_time_value", }, "service_accounts": [ {"email": "email_value", "scopes": ["scopes_value1", "scopes_value2"]} @@ -3976,42 +3906,6 @@ def test_set_iam_policy_rest_call_success(request_type): "bindings": {}, "etag": "etag_value", "iam_owned": True, - "rules": [ - { - "action": "action_value", - "conditions": [ - { - "iam": "iam_value", - "op": "op_value", - "svc": "svc_value", - "sys": "sys_value", - "values": 
["values_value1", "values_value2"], - } - ], - "description": "description_value", - "ins": ["ins_value1", "ins_value2"], - "log_configs": [ - { - "cloud_audit": { - "authorization_logging_options": { - "permission_type": "permission_type_value" - }, - "log_name": "log_name_value", - }, - "counter": { - "custom_fields": [ - {"name": "name_value", "value": "value_value"} - ], - "field": "field_value", - "metric": "metric_value", - }, - "data_access": {"log_mode": "log_mode_value"}, - } - ], - "not_ins": ["not_ins_value1", "not_ins_value2"], - "permissions": ["permissions_value1", "permissions_value2"], - } - ], "version": 774, }, } diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_machine_types.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_machine_types.py index fecdc7603ed6..6983bb5ab44e 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_machine_types.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_machine_types.py @@ -297,85 +297,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (MachineTypesClient, transports.MachineTypesRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. 
- channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ @@ -1950,6 +1871,7 @@ def test_get_rest_call_success(request_type): with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = compute.MachineType( + architecture="architecture_value", creation_timestamp="creation_timestamp_value", description="description_value", guest_cpus=1090, @@ -1978,6 +1900,7 @@ def test_get_rest_call_success(request_type): # Establish that the response is the type that we expect. 
assert isinstance(response, compute.MachineType) + assert response.architecture == "architecture_value" assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.guest_cpus == 1090 diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_network_attachments.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_network_attachments.py index 6afd87292c28..bce42b021bd6 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_network_attachments.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_network_attachments.py @@ -332,85 +332,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (NetworkAttachmentsClient, transports.NetworkAttachmentsRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. 
- channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ @@ -5057,42 +4978,6 @@ def test_set_iam_policy_rest_call_success(request_type): "bindings": {}, "etag": "etag_value", "iam_owned": True, - "rules": [ - { - "action": "action_value", - "conditions": [ - { - "iam": "iam_value", - "op": "op_value", - "svc": "svc_value", - "sys": "sys_value", - "values": ["values_value1", "values_value2"], - } - ], - "description": "description_value", - "ins": ["ins_value1", "ins_value2"], - "log_configs": [ - { - "cloud_audit": { - "authorization_logging_options": { - "permission_type": "permission_type_value" - }, - "log_name": "log_name_value", - }, - "counter": { - "custom_fields": [ - {"name": "name_value", "value": "value_value"} - ], - "field": "field_value", - "metric": "metric_value", - }, - "data_access": {"log_mode": "log_mode_value"}, - } - ], - "not_ins": ["not_ins_value1", "not_ins_value2"], - "permissions": ["permissions_value1", "permissions_value2"], - } - ], "version": 774, }, } diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_network_edge_security_services.py 
b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_network_edge_security_services.py index ee101af2c254..2a65e7f6c25e 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_network_edge_security_services.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_network_edge_security_services.py @@ -344,89 +344,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - ( - NetworkEdgeSecurityServicesClient, - transports.NetworkEdgeSecurityServicesRestTransport, - "rest", - ), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. 
- channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_network_endpoint_groups.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_network_endpoint_groups.py index c01ab77835aa..02700a8eeccc 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_network_endpoint_groups.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_network_endpoint_groups.py @@ -336,89 +336,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - ( - NetworkEndpointGroupsClient, - transports.NetworkEndpointGroupsRestTransport, - "rest", - ), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. 
- assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. 
- # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ @@ -4276,9 +4193,11 @@ def test_attach_network_endpoints_rest_call_success(request_type): "network_endpoints": [ { "annotations": {}, + "client_destination_port": 2468, "fqdn": "fqdn_value", "instance": "instance_value", "ip_address": "ip_address_value", + "ipv6_address": "ipv6_address_value", "port": 453, } ] @@ -4707,9 +4626,11 @@ def test_detach_network_endpoints_rest_call_success(request_type): "network_endpoints": [ { "annotations": {}, + "client_destination_port": 2468, "fqdn": "fqdn_value", "instance": "instance_value", "ip_address": "ip_address_value", + "ipv6_address": "ipv6_address_value", "port": 453, } ] @@ -5133,6 +5054,7 @@ def test_insert_rest_call_success(request_type): "network_endpoint_type": "network_endpoint_type_value", "psc_data": { "consumer_psc_address": "consumer_psc_address_value", + "producer_port": 1416, "psc_connection_id": 1793, "psc_connection_status": "psc_connection_status_value", }, diff 
--git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_network_firewall_policies.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_network_firewall_policies.py index 9c8600e18667..7aa671940d38 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_network_firewall_policies.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_network_firewall_policies.py @@ -336,89 +336,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - ( - NetworkFirewallPoliciesClient, - transports.NetworkFirewallPoliciesRestTransport, - "rest", - ), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. 
- channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ @@ -9131,42 +9048,6 @@ def test_set_iam_policy_rest_call_success(request_type): "bindings": {}, "etag": "etag_value", "iam_owned": True, - "rules": [ - { - "action": "action_value", - "conditions": [ - { - "iam": "iam_value", - "op": "op_value", - "svc": "svc_value", - "sys": "sys_value", - "values": ["values_value1", "values_value2"], - } - ], - "description": "description_value", - "ins": ["ins_value1", "ins_value2"], - "log_configs": [ - { - "cloud_audit": { - "authorization_logging_options": { - "permission_type": "permission_type_value" - }, - "log_name": "log_name_value", - }, - "counter": { - "custom_fields": [ - {"name": "name_value", "value": "value_value"} - ], - "field": "field_value", - "metric": "metric_value", - }, - "data_access": {"log_mode": "log_mode_value"}, - } - ], - "not_ins": ["not_ins_value1", "not_ins_value2"], - "permissions": ["permissions_value1", "permissions_value2"], - } - ], "version": 774, }, } diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_networks.py 
b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_networks.py index 6d50602fcba3..7f06d791adba 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_networks.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_networks.py @@ -287,85 +287,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (NetworksClient, transports.NetworksRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. 
- google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
- ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_node_groups.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_node_groups.py index 415c7dfa423f..dc3d2395332f 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_node_groups.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_node_groups.py @@ -296,85 +296,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (NodeGroupsClient, transports.NodeGroupsRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. 
- channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ @@ -8247,42 +8168,6 @@ def test_set_iam_policy_rest_call_success(request_type): "bindings": {}, "etag": "etag_value", "iam_owned": True, - "rules": [ - { - "action": "action_value", - "conditions": [ - { - "iam": "iam_value", - "op": "op_value", - "svc": "svc_value", - "sys": "sys_value", - "values": ["values_value1", "values_value2"], - } - ], - "description": "description_value", - "ins": ["ins_value1", "ins_value2"], - "log_configs": [ - { - "cloud_audit": { - "authorization_logging_options": { - "permission_type": "permission_type_value" - }, - "log_name": "log_name_value", - }, - "counter": { - "custom_fields": [ - {"name": "name_value", "value": "value_value"} - ], - "field": "field_value", - "metric": "metric_value", - }, - "data_access": {"log_mode": "log_mode_value"}, - } - ], - "not_ins": ["not_ins_value1", "not_ins_value2"], - "permissions": ["permissions_value1", "permissions_value2"], - } - ], "version": 774, }, } diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_node_templates.py 
b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_node_templates.py index ef29efc0f889..4fd0d1d2ff0d 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_node_templates.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_node_templates.py @@ -315,85 +315,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (NodeTemplatesClient, transports.NodeTemplatesRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. 
- channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ @@ -4259,42 +4180,6 @@ def test_set_iam_policy_rest_call_success(request_type): "bindings": {}, "etag": "etag_value", "iam_owned": True, - "rules": [ - { - "action": "action_value", - "conditions": [ - { - "iam": "iam_value", - "op": "op_value", - "svc": "svc_value", - "sys": "sys_value", - "values": ["values_value1", "values_value2"], - } - ], - "description": "description_value", - "ins": ["ins_value1", "ins_value2"], - "log_configs": [ - { - "cloud_audit": { - "authorization_logging_options": { - "permission_type": "permission_type_value" - }, - "log_name": "log_name_value", - }, - "counter": { - "custom_fields": [ - {"name": "name_value", "value": "value_value"} - ], - "field": "field_value", - "metric": "metric_value", - }, - "data_access": {"log_mode": "log_mode_value"}, - } - ], - "not_ins": ["not_ins_value1", "not_ins_value2"], - "permissions": ["permissions_value1", "permissions_value2"], - } - ], "version": 774, }, } diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_node_types.py 
b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_node_types.py index 7698fd555902..af6654efc240 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_node_types.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_node_types.py @@ -287,85 +287,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (NodeTypesClient, transports.NodeTypesRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. 
- google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
- ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_packet_mirrorings.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_packet_mirrorings.py index b6e6f7282870..606ff885e03b 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_packet_mirrorings.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_packet_mirrorings.py @@ -320,85 +320,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (PacketMirroringsClient, transports.PacketMirroringsRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. 
- channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_projects.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_projects.py index 5a29cd0b7afe..e0ddb2c1f1e1 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_projects.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_projects.py @@ -287,85 +287,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (ProjectsClient, transports.ProjectsRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. 
- assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. 
- # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_public_advertised_prefixes.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_public_advertised_prefixes.py index 08bba598275f..d801f651e993 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_public_advertised_prefixes.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_public_advertised_prefixes.py @@ -336,89 +336,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
-@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - ( - PublicAdvertisedPrefixesClient, - transports.PublicAdvertisedPrefixesRestTransport, - "rest", - ), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. 
- client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
- ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_public_delegated_prefixes.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_public_delegated_prefixes.py index 6c8d32af5a8f..db4a041cba8f 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_public_delegated_prefixes.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_public_delegated_prefixes.py @@ -336,89 +336,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - ( - PublicDelegatedPrefixesClient, - transports.PublicDelegatedPrefixesRestTransport, - "rest", - ), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. 
- channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_autoscalers.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_autoscalers.py index 010624999427..22d3737e2648 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_autoscalers.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_autoscalers.py @@ -328,85 +328,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (RegionAutoscalersClient, transports.RegionAutoscalersRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. 
- assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. 
- # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_backend_services.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_backend_services.py index 1effe7b9b165..0a1ed93c9c78 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_backend_services.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_backend_services.py @@ -336,89 +336,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
-@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - ( - RegionBackendServicesClient, - transports.RegionBackendServicesRestTransport, - "rest", - ), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. 
- client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
- ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ @@ -5032,6 +4949,7 @@ def test_get_rest_call_success(request_type): fingerprint="fingerprint_value", health_checks=["health_checks_value"], id=205, + ip_address_selection_policy="ip_address_selection_policy_value", kind="kind_value", load_balancing_scheme="load_balancing_scheme_value", locality_lb_policy="locality_lb_policy_value", @@ -5073,6 +4991,7 @@ def test_get_rest_call_success(request_type): assert response.fingerprint == "fingerprint_value" assert response.health_checks == ["health_checks_value"] assert response.id == 205 + assert response.ip_address_selection_policy == "ip_address_selection_policy_value" assert response.kind == "kind_value" assert response.load_balancing_scheme == "load_balancing_scheme_value" assert response.locality_lb_policy == "locality_lb_policy_value" @@ -5618,6 +5537,7 @@ def test_insert_rest_call_success(request_type): "oauth2_client_secret_sha256": "oauth2_client_secret_sha256_value", }, "id": 205, + "ip_address_selection_policy": "ip_address_selection_policy_value", "kind": "kind_value", "load_balancing_scheme": "load_balancing_scheme_value", "locality_lb_policies": [ @@ -5672,6 +5592,11 @@ def test_insert_rest_call_success(request_type): "service_bindings": ["service_bindings_value1", "service_bindings_value2"], "service_lb_policy": "service_lb_policy_value", "session_affinity": "session_affinity_value", + "strong_session_affinity_cookie": { + "name": "name_value", + "path": "path_value", + "ttl": {}, + }, "subsetting": {"policy": "policy_value"}, "timeout_sec": 1185, "used_by": [{"reference": "reference_value"}], @@ -6270,6 +6195,7 @@ def test_patch_rest_call_success(request_type): "oauth2_client_secret_sha256": "oauth2_client_secret_sha256_value", }, "id": 205, + 
"ip_address_selection_policy": "ip_address_selection_policy_value", "kind": "kind_value", "load_balancing_scheme": "load_balancing_scheme_value", "locality_lb_policies": [ @@ -6324,6 +6250,11 @@ def test_patch_rest_call_success(request_type): "service_bindings": ["service_bindings_value1", "service_bindings_value2"], "service_lb_policy": "service_lb_policy_value", "session_affinity": "session_affinity_value", + "strong_session_affinity_cookie": { + "name": "name_value", + "path": "path_value", + "ttl": {}, + }, "subsetting": {"policy": "policy_value"}, "timeout_sec": 1185, "used_by": [{"reference": "reference_value"}], @@ -6598,42 +6529,6 @@ def test_set_iam_policy_rest_call_success(request_type): "bindings": {}, "etag": "etag_value", "iam_owned": True, - "rules": [ - { - "action": "action_value", - "conditions": [ - { - "iam": "iam_value", - "op": "op_value", - "svc": "svc_value", - "sys": "sys_value", - "values": ["values_value1", "values_value2"], - } - ], - "description": "description_value", - "ins": ["ins_value1", "ins_value2"], - "log_configs": [ - { - "cloud_audit": { - "authorization_logging_options": { - "permission_type": "permission_type_value" - }, - "log_name": "log_name_value", - }, - "counter": { - "custom_fields": [ - {"name": "name_value", "value": "value_value"} - ], - "field": "field_value", - "metric": "metric_value", - }, - "data_access": {"log_mode": "log_mode_value"}, - } - ], - "not_ins": ["not_ins_value1", "not_ins_value2"], - "permissions": ["permissions_value1", "permissions_value2"], - } - ], "version": 774, }, } @@ -7395,6 +7290,7 @@ def test_update_rest_call_success(request_type): "oauth2_client_secret_sha256": "oauth2_client_secret_sha256_value", }, "id": 205, + "ip_address_selection_policy": "ip_address_selection_policy_value", "kind": "kind_value", "load_balancing_scheme": "load_balancing_scheme_value", "locality_lb_policies": [ @@ -7449,6 +7345,11 @@ def test_update_rest_call_success(request_type): "service_bindings": 
["service_bindings_value1", "service_bindings_value2"], "service_lb_policy": "service_lb_policy_value", "session_affinity": "session_affinity_value", + "strong_session_affinity_cookie": { + "name": "name_value", + "path": "path_value", + "ttl": {}, + }, "subsetting": {"policy": "policy_value"}, "timeout_sec": 1185, "used_by": [{"reference": "reference_value"}], diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_commitments.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_commitments.py index cbafdda9fd84..a207351e4ecf 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_commitments.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_commitments.py @@ -328,85 +328,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (RegionCommitmentsClient, transports.RegionCommitmentsRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. 
Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ @@ -2851,6 +2772,7 @@ def test_get_rest_call_success(request_type): auto_renew=True, category="category_value", creation_timestamp="creation_timestamp_value", + custom_end_timestamp="custom_end_timestamp_value", description="description_value", end_timestamp="end_timestamp_value", existing_reservations=["existing_reservations_value"], @@ -2884,6 +2806,7 @@ def test_get_rest_call_success(request_type): assert response.auto_renew is True assert response.category == "category_value" assert response.creation_timestamp == "creation_timestamp_value" + assert response.custom_end_timestamp == "custom_end_timestamp_value" assert response.description == "description_value" assert response.end_timestamp == "end_timestamp_value" assert response.existing_reservations == ["existing_reservations_value"] @@ -2997,6 +2920,7 @@ def test_insert_rest_call_success(request_type): "auto_renew": True, "category": "category_value", "creation_timestamp": "creation_timestamp_value", + "custom_end_timestamp": "custom_end_timestamp_value", "description": 
"description_value", "end_timestamp": "end_timestamp_value", "existing_reservations": [ @@ -3072,6 +2996,9 @@ def test_insert_rest_call_success(request_type): "zone": "zone_value", } ], + "resource_status": { + "custom_term_eligibility_end_timestamp": "custom_term_eligibility_end_timestamp_value" + }, "resources": [ { "accelerator_type": "accelerator_type_value", @@ -3441,6 +3368,7 @@ def test_update_rest_call_success(request_type): "auto_renew": True, "category": "category_value", "creation_timestamp": "creation_timestamp_value", + "custom_end_timestamp": "custom_end_timestamp_value", "description": "description_value", "end_timestamp": "end_timestamp_value", "existing_reservations": [ @@ -3516,6 +3444,9 @@ def test_update_rest_call_success(request_type): "zone": "zone_value", } ], + "resource_status": { + "custom_term_eligibility_end_timestamp": "custom_term_eligibility_end_timestamp_value" + }, "resources": [ { "accelerator_type": "accelerator_type_value", diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_disk_types.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_disk_types.py index 8a3adeed21ba..995fcdedbf99 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_disk_types.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_disk_types.py @@ -312,85 +312,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (RegionDiskTypesClient, transports.RegionDiskTypesRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. 
- assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. 
- # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_disks.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_disks.py index 559d5f9453f6..cd41e2c06498 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_disks.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_disks.py @@ -300,85 +300,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
-@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (RegionDisksClient, transports.RegionDisksRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. 
- client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
- ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ @@ -3181,7 +3102,7 @@ def test_insert_rest_flattened(): mock_args = dict( project="project_value", region="region_value", - disk_resource=compute.Disk(architecture="architecture_value"), + disk_resource=compute.Disk(access_mode="access_mode_value"), ) mock_args.update(sample_request) @@ -3220,7 +3141,7 @@ def test_insert_rest_flattened_error(transport: str = "rest"): compute.InsertRegionDiskRequest(), project="project_value", region="region_value", - disk_resource=compute.Disk(architecture="architecture_value"), + disk_resource=compute.Disk(access_mode="access_mode_value"), ) @@ -3391,7 +3312,7 @@ def test_insert_unary_rest_flattened(): mock_args = dict( project="project_value", region="region_value", - disk_resource=compute.Disk(architecture="architecture_value"), + disk_resource=compute.Disk(access_mode="access_mode_value"), ) mock_args.update(sample_request) @@ -3430,7 +3351,7 @@ def test_insert_unary_rest_flattened_error(transport: str = "rest"): compute.InsertRegionDiskRequest(), project="project_value", region="region_value", - disk_resource=compute.Disk(architecture="architecture_value"), + disk_resource=compute.Disk(access_mode="access_mode_value"), ) @@ -6846,7 +6767,7 @@ def test_update_rest_flattened(): project="project_value", region="region_value", disk="disk_value", - disk_resource=compute.Disk(architecture="architecture_value"), + disk_resource=compute.Disk(access_mode="access_mode_value"), ) mock_args.update(sample_request) @@ -6886,7 +6807,7 @@ def test_update_rest_flattened_error(transport: str = "rest"): project="project_value", region="region_value", disk="disk_value", - disk_resource=compute.Disk(architecture="architecture_value"), + 
disk_resource=compute.Disk(access_mode="access_mode_value"), ) @@ -7065,7 +6986,7 @@ def test_update_unary_rest_flattened(): project="project_value", region="region_value", disk="disk_value", - disk_resource=compute.Disk(architecture="architecture_value"), + disk_resource=compute.Disk(access_mode="access_mode_value"), ) mock_args.update(sample_request) @@ -7105,7 +7026,7 @@ def test_update_unary_rest_flattened_error(transport: str = "rest"): project="project_value", region="region_value", disk="disk_value", - disk_resource=compute.Disk(architecture="architecture_value"), + disk_resource=compute.Disk(access_mode="access_mode_value"), ) @@ -8149,6 +8070,7 @@ def test_get_rest_call_success(request_type): with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = compute.Disk( + access_mode="access_mode_value", architecture="architecture_value", creation_timestamp="creation_timestamp_value", description="description_value", @@ -8204,6 +8126,7 @@ def test_get_rest_call_success(request_type): # Establish that the response is the type that we expect. 
assert isinstance(response, compute.Disk) + assert response.access_mode == "access_mode_value" assert response.architecture == "architecture_value" assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" @@ -8466,6 +8389,7 @@ def test_insert_rest_call_success(request_type): # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2"} request_init["disk_resource"] = { + "access_mode": "access_mode_value", "architecture": "architecture_value", "async_primary_disk": { "consistency_group_policy": "consistency_group_policy_value", @@ -9400,42 +9324,6 @@ def test_set_iam_policy_rest_call_success(request_type): "bindings": {}, "etag": "etag_value", "iam_owned": True, - "rules": [ - { - "action": "action_value", - "conditions": [ - { - "iam": "iam_value", - "op": "op_value", - "svc": "svc_value", - "sys": "sys_value", - "values": ["values_value1", "values_value2"], - } - ], - "description": "description_value", - "ins": ["ins_value1", "ins_value2"], - "log_configs": [ - { - "cloud_audit": { - "authorization_logging_options": { - "permission_type": "permission_type_value" - }, - "log_name": "log_name_value", - }, - "counter": { - "custom_fields": [ - {"name": "name_value", "value": "value_value"} - ], - "field": "field_value", - "metric": "metric_value", - }, - "data_access": {"log_mode": "log_mode_value"}, - } - ], - "not_ins": ["not_ins_value1", "not_ins_value2"], - "permissions": ["permissions_value1", "permissions_value2"], - } - ], "version": 774, }, } @@ -10726,6 +10614,7 @@ def test_update_rest_call_success(request_type): # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2", "disk": "sample3"} request_init["disk_resource"] = { + "access_mode": "access_mode_value", "architecture": "architecture_value", "async_primary_disk": { "consistency_group_policy": "consistency_group_policy_value", diff --git 
a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_health_check_services.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_health_check_services.py index 5cb9ecf4e1b9..ddb0b71a61fb 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_health_check_services.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_health_check_services.py @@ -340,89 +340,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - ( - RegionHealthCheckServicesClient, - transports.RegionHealthCheckServicesRestTransport, - "rest", - ), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. 
- channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_health_checks.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_health_checks.py index 39583e7d0a1f..d927db1ea0f8 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_health_checks.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_health_checks.py @@ -332,85 +332,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (RegionHealthChecksClient, transports.RegionHealthChecksRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. 
- assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. 
- # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ @@ -3443,6 +3364,7 @@ def test_get_rest_call_success(request_type): name="name_value", region="region_value", self_link="self_link_value", + source_regions=["source_regions_value"], timeout_sec=1185, type_="type__value", unhealthy_threshold=2046, @@ -3470,6 +3392,7 @@ def test_get_rest_call_success(request_type): assert response.name == "name_value" assert response.region == "region_value" assert response.self_link == "self_link_value" + assert response.source_regions == ["source_regions_value"] assert response.timeout_sec == 1185 assert response.type_ == "type__value" assert response.unhealthy_threshold == 2046 @@ -3611,6 +3534,7 @@ def test_insert_rest_call_success(request_type): "name": "name_value", "region": "region_value", "self_link": "self_link_value", + "source_regions": ["source_regions_value1", "source_regions_value2"], "ssl_health_check": { "port": 453, "port_name": "port_name_value", @@ -4036,6 +3960,7 @@ 
def test_patch_rest_call_success(request_type): "name": "name_value", "region": "region_value", "self_link": "self_link_value", + "source_regions": ["source_regions_value1", "source_regions_value2"], "ssl_health_check": { "port": 453, "port_name": "port_name_value", @@ -4338,6 +4263,7 @@ def test_update_rest_call_success(request_type): "name": "name_value", "region": "region_value", "self_link": "self_link_value", + "source_regions": ["source_regions_value1", "source_regions_value2"], "ssl_health_check": { "port": 453, "port_name": "port_name_value", diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_instance_group_managers.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_instance_group_managers.py index 3db016494a39..dce6116f6c5b 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_instance_group_managers.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_instance_group_managers.py @@ -344,89 +344,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - ( - RegionInstanceGroupManagersClient, - transports.RegionInstanceGroupManagersRestTransport, - "rest", - ), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. 
- channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ @@ -10148,6 +10065,8 @@ def test_get_rest_call_success(request_type): list_managed_instances_results="list_managed_instances_results_value", name="name_value", region="region_value", + satisfies_pzi=True, + satisfies_pzs=True, self_link="self_link_value", target_pools=["target_pools_value"], target_size=1185, @@ -10181,6 +10100,8 @@ def test_get_rest_call_success(request_type): ) assert response.name == "name_value" assert response.region == "region_value" + assert response.satisfies_pzi is True + assert response.satisfies_pzs is True assert response.self_link == "self_link_value" assert response.target_pools == ["target_pools_value"] assert response.target_size == 1185 @@ -10312,6 +10233,7 @@ def test_insert_rest_call_success(request_type): }, "fingerprint": "fingerprint_value", "id": 205, + "instance_flexibility_policy": {"instance_selections": {}}, "instance_group": "instance_group_value", "instance_lifecycle_policy": { "default_action_on_failure": "default_action_on_failure_value", @@ -10323,6 +10245,8 @@ def 
test_insert_rest_call_success(request_type): "name": "name_value", "named_ports": [{"name": "name_value", "port": 453}], "region": "region_value", + "satisfies_pzi": True, + "satisfies_pzs": True, "self_link": "self_link_value", "stateful_policy": { "preserved_state": {"disks": {}, "external_i_ps": {}, "internal_i_ps": {}} @@ -11161,6 +11085,7 @@ def test_patch_rest_call_success(request_type): }, "fingerprint": "fingerprint_value", "id": 205, + "instance_flexibility_policy": {"instance_selections": {}}, "instance_group": "instance_group_value", "instance_lifecycle_policy": { "default_action_on_failure": "default_action_on_failure_value", @@ -11172,6 +11097,8 @@ def test_patch_rest_call_success(request_type): "name": "name_value", "named_ports": [{"name": "name_value", "port": 453}], "region": "region_value", + "satisfies_pzi": True, + "satisfies_pzs": True, "self_link": "self_link_value", "stateful_policy": { "preserved_state": {"disks": {}, "external_i_ps": {}, "internal_i_ps": {}} diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_instance_groups.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_instance_groups.py index 5a2563bd683b..61bd7eba7093 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_instance_groups.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_instance_groups.py @@ -334,89 +334,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
-@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - ( - RegionInstanceGroupsClient, - transports.RegionInstanceGroupsRestTransport, - "rest", - ), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. 
- client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
- ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_instance_templates.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_instance_templates.py index 9d8b6ec8fc63..81a2b90c8bcb 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_instance_templates.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_instance_templates.py @@ -336,89 +336,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - ( - RegionInstanceTemplatesClient, - transports.RegionInstanceTemplatesRestTransport, - "rest", - ), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. 
- channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ @@ -2761,11 +2678,16 @@ def test_insert_rest_call_success(request_type): "advanced_machine_features": { "enable_nested_virtualization": True, "enable_uefi_networking": True, + "performance_monitoring_unit": "performance_monitoring_unit_value", "threads_per_core": 1689, + "turbo_mode": "turbo_mode_value", "visible_core_count": 1918, }, "can_ip_forward": True, - "confidential_instance_config": {"enable_confidential_compute": True}, + "confidential_instance_config": { + "confidential_instance_type": "confidential_instance_type_value", + "enable_confidential_compute": True, + }, "description": "description_value", "disks": [ { @@ -2897,9 +2819,11 @@ def test_insert_rest_call_success(request_type): ], "scheduling": { "automatic_restart": True, + "availability_domain": 2002, "instance_termination_action": "instance_termination_action_value", "local_ssd_recovery_timeout": {"nanos": 543, "seconds": 751}, "location_hint": "location_hint_value", + "max_run_duration": {}, "min_node_cpus": 1379, "node_affinities": [ { @@ -2909,8 +2833,10 @@ def 
test_insert_rest_call_success(request_type): } ], "on_host_maintenance": "on_host_maintenance_value", + "on_instance_stop_action": {"discard_local_ssd": True}, "preemptible": True, "provisioning_model": "provisioning_model_value", + "termination_time": "termination_time_value", }, "service_accounts": [ {"email": "email_value", "scopes": ["scopes_value1", "scopes_value2"]} diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_instances.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_instances.py index 02963274d0e5..c0f88c5edf38 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_instances.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_instances.py @@ -318,85 +318,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (RegionInstancesClient, transports.RegionInstancesRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. 
Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ @@ -1528,11 +1449,16 @@ def test_bulk_insert_rest_call_success(request_type): "advanced_machine_features": { "enable_nested_virtualization": True, "enable_uefi_networking": True, + "performance_monitoring_unit": "performance_monitoring_unit_value", "threads_per_core": 1689, + "turbo_mode": "turbo_mode_value", "visible_core_count": 1918, }, "can_ip_forward": True, - "confidential_instance_config": {"enable_confidential_compute": True}, + "confidential_instance_config": { + "confidential_instance_type": "confidential_instance_type_value", + "enable_confidential_compute": True, + }, "description": "description_value", "disks": [ { @@ -1664,9 +1590,11 @@ def test_bulk_insert_rest_call_success(request_type): ], "scheduling": { "automatic_restart": True, + "availability_domain": 2002, "instance_termination_action": "instance_termination_action_value", "local_ssd_recovery_timeout": {"nanos": 543, "seconds": 751}, "location_hint": "location_hint_value", + "max_run_duration": {}, "min_node_cpus": 1379, "node_affinities": [ { @@ -1676,8 +1604,10 @@ def 
test_bulk_insert_rest_call_success(request_type): } ], "on_host_maintenance": "on_host_maintenance_value", + "on_instance_stop_action": {"discard_local_ssd": True}, "preemptible": True, "provisioning_model": "provisioning_model_value", + "termination_time": "termination_time_value", }, "service_accounts": [ {"email": "email_value", "scopes": ["scopes_value1", "scopes_value2"]} diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_instant_snapshots.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_instant_snapshots.py index 5d15f52a5e30..6e2fd3003cce 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_instant_snapshots.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_instant_snapshots.py @@ -336,89 +336,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - ( - RegionInstantSnapshotsClient, - transports.RegionInstantSnapshotsRestTransport, - "rest", - ), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. 
Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ @@ -4363,42 +4280,6 @@ def test_set_iam_policy_rest_call_success(request_type): "bindings": {}, "etag": "etag_value", "iam_owned": True, - "rules": [ - { - "action": "action_value", - "conditions": [ - { - "iam": "iam_value", - "op": "op_value", - "svc": "svc_value", - "sys": "sys_value", - "values": ["values_value1", "values_value2"], - } - ], - "description": "description_value", - "ins": ["ins_value1", "ins_value2"], - "log_configs": [ - { - "cloud_audit": { - "authorization_logging_options": { - "permission_type": "permission_type_value" - }, - "log_name": "log_name_value", - }, - "counter": { - "custom_fields": [ - {"name": "name_value", "value": "value_value"} - ], - "field": "field_value", - "metric": "metric_value", - }, - "data_access": {"log_mode": "log_mode_value"}, - } - ], - "not_ins": ["not_ins_value1", "not_ins_value2"], - "permissions": ["permissions_value1", "permissions_value2"], - } - ], "version": 774, }, } diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_network_endpoint_groups.py 
b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_network_endpoint_groups.py index 380cf85f2317..2a8e6b937c35 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_network_endpoint_groups.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_network_endpoint_groups.py @@ -344,89 +344,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - ( - RegionNetworkEndpointGroupsClient, - transports.RegionNetworkEndpointGroupsRestTransport, - "rest", - ), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. 
- channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ @@ -3663,9 +3580,11 @@ def test_attach_network_endpoints_rest_call_success(request_type): "network_endpoints": [ { "annotations": {}, + "client_destination_port": 2468, "fqdn": "fqdn_value", "instance": "instance_value", "ip_address": "ip_address_value", + "ipv6_address": "ipv6_address_value", "port": 453, } ] @@ -4098,9 +4017,11 @@ def test_detach_network_endpoints_rest_call_success(request_type): "network_endpoints": [ { "annotations": {}, + "client_destination_port": 2468, "fqdn": "fqdn_value", "instance": "instance_value", "ip_address": "ip_address_value", + "ipv6_address": "ipv6_address_value", "port": 453, } ] @@ -4530,6 +4451,7 @@ def test_insert_rest_call_success(request_type): "network_endpoint_type": "network_endpoint_type_value", "psc_data": { "consumer_psc_address": "consumer_psc_address_value", + "producer_port": 1416, "psc_connection_id": 1793, "psc_connection_status": "psc_connection_status_value", }, diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_network_firewall_policies.py 
b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_network_firewall_policies.py index 15094ba9f0cf..248200c7c997 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_network_firewall_policies.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_network_firewall_policies.py @@ -348,89 +348,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - ( - RegionNetworkFirewallPoliciesClient, - transports.RegionNetworkFirewallPoliciesRestTransport, - "rest", - ), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. 
- channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ @@ -9870,42 +9787,6 @@ def test_set_iam_policy_rest_call_success(request_type): "bindings": {}, "etag": "etag_value", "iam_owned": True, - "rules": [ - { - "action": "action_value", - "conditions": [ - { - "iam": "iam_value", - "op": "op_value", - "svc": "svc_value", - "sys": "sys_value", - "values": ["values_value1", "values_value2"], - } - ], - "description": "description_value", - "ins": ["ins_value1", "ins_value2"], - "log_configs": [ - { - "cloud_audit": { - "authorization_logging_options": { - "permission_type": "permission_type_value" - }, - "log_name": "log_name_value", - }, - "counter": { - "custom_fields": [ - {"name": "name_value", "value": "value_value"} - ], - "field": "field_value", - "metric": "metric_value", - }, - "data_access": {"log_mode": "log_mode_value"}, - } - ], - "not_ins": ["not_ins_value1", "not_ins_value2"], - "permissions": ["permissions_value1", "permissions_value2"], - } - ], "version": 774, }, } diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_notification_endpoints.py 
b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_notification_endpoints.py index 371d5cff4ec6..66a03f6caa5a 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_notification_endpoints.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_notification_endpoints.py @@ -344,89 +344,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - ( - RegionNotificationEndpointsClient, - transports.RegionNotificationEndpointsRestTransport, - "rest", - ), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. 
- channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_operations.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_operations.py index 9c44e5e1cd9d..887773b60a40 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_operations.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_operations.py @@ -313,85 +313,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (RegionOperationsClient, transports.RegionOperationsRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. 
- assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. 
- # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_security_policies.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_security_policies.py index b645155e5011..4a383ccc74c0 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_security_policies.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_security_policies.py @@ -336,89 +336,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
-@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - ( - RegionSecurityPoliciesClient, - transports.RegionSecurityPoliciesRestTransport, - "rest", - ), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. 
- client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
- ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ @@ -5234,7 +5151,17 @@ def test_insert_rest_call_success(request_type): "auto_deploy_expiration_sec": 2785, "auto_deploy_impacted_baseline_threshold": 0.4121, "auto_deploy_load_threshold": 0.2768, + "detection_absolute_qps": 0.23520000000000002, + "detection_load_threshold": 0.2538, + "detection_relative_to_baseline_qps": 0.36010000000000003, "name": "name_value", + "traffic_granularity_configs": [ + { + "enable_each_unique_value": True, + "type_": "type__value", + "value": "value_value", + } + ], } ], } @@ -5745,7 +5672,17 @@ def test_patch_rest_call_success(request_type): "auto_deploy_expiration_sec": 2785, "auto_deploy_impacted_baseline_threshold": 0.4121, "auto_deploy_load_threshold": 0.2768, + "detection_absolute_qps": 0.23520000000000002, + "detection_load_threshold": 0.2538, + "detection_relative_to_baseline_qps": 0.36010000000000003, "name": "name_value", + "traffic_granularity_configs": [ + { + "enable_each_unique_value": True, + "type_": "type__value", + "value": "value_value", + } + ], } ], } diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_ssl_certificates.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_ssl_certificates.py index 59c296f291c2..15c39dff4cae 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_ssl_certificates.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_ssl_certificates.py @@ -336,89 +336,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
-@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - ( - RegionSslCertificatesClient, - transports.RegionSslCertificatesRestTransport, - "rest", - ), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. 
- client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
- ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_ssl_policies.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_ssl_policies.py index 0b363004a3dd..6c3b759a4e5c 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_ssl_policies.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_ssl_policies.py @@ -328,85 +328,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (RegionSslPoliciesClient, transports.RegionSslPoliciesRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. 
- channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_target_http_proxies.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_target_http_proxies.py index ba9ed04184db..2bafa4c8a6c9 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_target_http_proxies.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_target_http_proxies.py @@ -336,89 +336,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - ( - RegionTargetHttpProxiesClient, - transports.RegionTargetHttpProxiesRestTransport, - "rest", - ), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. 
- assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. 
- # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_target_https_proxies.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_target_https_proxies.py index a77e9a80b727..d6a017a104a8 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_target_https_proxies.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_target_https_proxies.py @@ -336,89 +336,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
-@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - ( - RegionTargetHttpsProxiesClient, - transports.RegionTargetHttpsProxiesRestTransport, - "rest", - ), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. 
- client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
- ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ @@ -3934,6 +3851,7 @@ def test_get_rest_call_success(request_type): server_tls_policy="server_tls_policy_value", ssl_certificates=["ssl_certificates_value"], ssl_policy="ssl_policy_value", + tls_early_data="tls_early_data_value", url_map="url_map_value", ) @@ -3966,6 +3884,7 @@ def test_get_rest_call_success(request_type): assert response.server_tls_policy == "server_tls_policy_value" assert response.ssl_certificates == ["ssl_certificates_value"] assert response.ssl_policy == "ssl_policy_value" + assert response.tls_early_data == "tls_early_data_value" assert response.url_map == "url_map_value" @@ -4080,6 +3999,7 @@ def test_insert_rest_call_success(request_type): "server_tls_policy": "server_tls_policy_value", "ssl_certificates": ["ssl_certificates_value1", "ssl_certificates_value2"], "ssl_policy": "ssl_policy_value", + "tls_early_data": "tls_early_data_value", "url_map": "url_map_value", } # The version of a generated dependency at test runtime may differ from the version used during generation. @@ -4468,6 +4388,7 @@ def test_patch_rest_call_success(request_type): "server_tls_policy": "server_tls_policy_value", "ssl_certificates": ["ssl_certificates_value1", "ssl_certificates_value2"], "ssl_policy": "ssl_policy_value", + "tls_early_data": "tls_early_data_value", "url_map": "url_map_value", } # The version of a generated dependency at test runtime may differ from the version used during generation. 
diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_target_tcp_proxies.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_target_tcp_proxies.py index 1084142cc685..9452891c094e 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_target_tcp_proxies.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_target_tcp_proxies.py @@ -336,89 +336,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - ( - RegionTargetTcpProxiesClient, - transports.RegionTargetTcpProxiesRestTransport, - "rest", - ), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. 
- channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_url_maps.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_url_maps.py index 3315e7d5a947..ee5c2b559929 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_url_maps.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_url_maps.py @@ -315,85 +315,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (RegionUrlMapsClient, transports.RegionUrlMapsRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. 
- assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. 
- # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ @@ -3736,6 +3657,19 @@ def test_insert_rest_call_success(request_type): request_init = {"project": "sample1", "region": "sample2"} request_init["url_map_resource"] = { "creation_timestamp": "creation_timestamp_value", + "default_custom_error_response_policy": { + "error_response_rules": [ + { + "match_response_codes": [ + "match_response_codes_value1", + "match_response_codes_value2", + ], + "override_response_code": 2344, + "path": "path_value", + } + ], + "error_service": "error_service_value", + }, "default_route_action": { "cors_policy": { "allow_credentials": True, @@ -3822,6 +3756,7 @@ def test_insert_rest_call_success(request_type): "name": "name_value", "path_matchers": [ { + "default_custom_error_response_policy": {}, "default_route_action": {}, "default_service": "default_service_value", "default_url_redirect": {}, @@ -3830,6 +3765,7 @@ def test_insert_rest_call_success(request_type): "name": "name_value", 
"path_rules": [ { + "custom_error_response_policy": {}, "paths": ["paths_value1", "paths_value2"], "route_action": {}, "service": "service_value", @@ -3838,6 +3774,7 @@ def test_insert_rest_call_success(request_type): ], "route_rules": [ { + "custom_error_response_policy": {}, "description": "description_value", "header_action": {}, "match_rules": [ @@ -4256,6 +4193,19 @@ def test_patch_rest_call_success(request_type): request_init = {"project": "sample1", "region": "sample2", "url_map": "sample3"} request_init["url_map_resource"] = { "creation_timestamp": "creation_timestamp_value", + "default_custom_error_response_policy": { + "error_response_rules": [ + { + "match_response_codes": [ + "match_response_codes_value1", + "match_response_codes_value2", + ], + "override_response_code": 2344, + "path": "path_value", + } + ], + "error_service": "error_service_value", + }, "default_route_action": { "cors_policy": { "allow_credentials": True, @@ -4342,6 +4292,7 @@ def test_patch_rest_call_success(request_type): "name": "name_value", "path_matchers": [ { + "default_custom_error_response_policy": {}, "default_route_action": {}, "default_service": "default_service_value", "default_url_redirect": {}, @@ -4350,6 +4301,7 @@ def test_patch_rest_call_success(request_type): "name": "name_value", "path_rules": [ { + "custom_error_response_policy": {}, "paths": ["paths_value1", "paths_value2"], "route_action": {}, "service": "service_value", @@ -4358,6 +4310,7 @@ def test_patch_rest_call_success(request_type): ], "route_rules": [ { + "custom_error_response_policy": {}, "description": "description_value", "header_action": {}, "match_rules": [ @@ -4653,6 +4606,19 @@ def test_update_rest_call_success(request_type): request_init = {"project": "sample1", "region": "sample2", "url_map": "sample3"} request_init["url_map_resource"] = { "creation_timestamp": "creation_timestamp_value", + "default_custom_error_response_policy": { + "error_response_rules": [ + { + "match_response_codes": [ + 
"match_response_codes_value1", + "match_response_codes_value2", + ], + "override_response_code": 2344, + "path": "path_value", + } + ], + "error_service": "error_service_value", + }, "default_route_action": { "cors_policy": { "allow_credentials": True, @@ -4739,6 +4705,7 @@ def test_update_rest_call_success(request_type): "name": "name_value", "path_matchers": [ { + "default_custom_error_response_policy": {}, "default_route_action": {}, "default_service": "default_service_value", "default_url_redirect": {}, @@ -4747,6 +4714,7 @@ def test_update_rest_call_success(request_type): "name": "name_value", "path_rules": [ { + "custom_error_response_policy": {}, "paths": ["paths_value1", "paths_value2"], "route_action": {}, "service": "service_value", @@ -4755,6 +4723,7 @@ def test_update_rest_call_success(request_type): ], "route_rules": [ { + "custom_error_response_policy": {}, "description": "description_value", "header_action": {}, "match_rules": [ @@ -5051,6 +5020,19 @@ def test_validate_rest_call_success(request_type): request_init["region_url_maps_validate_request_resource"] = { "resource": { "creation_timestamp": "creation_timestamp_value", + "default_custom_error_response_policy": { + "error_response_rules": [ + { + "match_response_codes": [ + "match_response_codes_value1", + "match_response_codes_value2", + ], + "override_response_code": 2344, + "path": "path_value", + } + ], + "error_service": "error_service_value", + }, "default_route_action": { "cors_policy": { "allow_credentials": True, @@ -5140,6 +5122,7 @@ def test_validate_rest_call_success(request_type): "name": "name_value", "path_matchers": [ { + "default_custom_error_response_policy": {}, "default_route_action": {}, "default_service": "default_service_value", "default_url_redirect": {}, @@ -5148,6 +5131,7 @@ def test_validate_rest_call_success(request_type): "name": "name_value", "path_rules": [ { + "custom_error_response_policy": {}, "paths": ["paths_value1", "paths_value2"], "route_action": {}, 
"service": "service_value", @@ -5156,6 +5140,7 @@ def test_validate_rest_call_success(request_type): ], "route_rules": [ { + "custom_error_response_policy": {}, "description": "description_value", "header_action": {}, "match_rules": [ diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_zones.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_zones.py index 1b57301a6ace..52330f10a499 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_zones.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_zones.py @@ -293,85 +293,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (RegionZonesClient, transports.RegionZonesRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. 
- channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_regions.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_regions.py index bdf72eb03adf..c90bf14c6e22 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_regions.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_regions.py @@ -273,85 +273,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (RegionsClient, transports.RegionsRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. 
- assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. 
- # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_reservations.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_reservations.py index e917cda7411d..9ba74b308889 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_reservations.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_reservations.py @@ -304,85 +304,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
-@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (ReservationsClient, transports.ReservationsRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. 
- client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
- ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ @@ -5434,42 +5355,6 @@ def test_set_iam_policy_rest_call_success(request_type): "bindings": {}, "etag": "etag_value", "iam_owned": True, - "rules": [ - { - "action": "action_value", - "conditions": [ - { - "iam": "iam_value", - "op": "op_value", - "svc": "svc_value", - "sys": "sys_value", - "values": ["values_value1", "values_value2"], - } - ], - "description": "description_value", - "ins": ["ins_value1", "ins_value2"], - "log_configs": [ - { - "cloud_audit": { - "authorization_logging_options": { - "permission_type": "permission_type_value" - }, - "log_name": "log_name_value", - }, - "counter": { - "custom_fields": [ - {"name": "name_value", "value": "value_value"} - ], - "field": "field_value", - "metric": "metric_value", - }, - "data_access": {"log_mode": "log_mode_value"}, - } - ], - "not_ins": ["not_ins_value1", "not_ins_value2"], - "permissions": ["permissions_value1", "permissions_value2"], - } - ], "version": 774, }, } diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_resource_policies.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_resource_policies.py index fae25a36f82c..c0bf8b296a45 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_resource_policies.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_resource_policies.py @@ -320,85 +320,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
-@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (ResourcePoliciesClient, transports.ResourcePoliciesRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. 
- client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
- ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ @@ -5075,42 +4996,6 @@ def test_set_iam_policy_rest_call_success(request_type): "bindings": {}, "etag": "etag_value", "iam_owned": True, - "rules": [ - { - "action": "action_value", - "conditions": [ - { - "iam": "iam_value", - "op": "op_value", - "svc": "svc_value", - "sys": "sys_value", - "values": ["values_value1", "values_value2"], - } - ], - "description": "description_value", - "ins": ["ins_value1", "ins_value2"], - "log_configs": [ - { - "cloud_audit": { - "authorization_logging_options": { - "permission_type": "permission_type_value" - }, - "log_name": "log_name_value", - }, - "counter": { - "custom_fields": [ - {"name": "name_value", "value": "value_value"} - ], - "field": "field_value", - "metric": "metric_value", - }, - "data_access": {"log_mode": "log_mode_value"}, - } - ], - "not_ins": ["not_ins_value1", "not_ins_value2"], - "permissions": ["permissions_value1", "permissions_value2"], - } - ], "version": 774, }, } diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_routers.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_routers.py index 94d4d91118a2..21c00c53ee6c 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_routers.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_routers.py @@ -280,85 +280,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
-@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (RoutersClient, transports.RoutersRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. 
- client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
- ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_routes.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_routes.py index 2751e4e2423a..889a91f49b9c 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_routes.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_routes.py @@ -279,85 +279,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (RoutesClient, transports.RoutesRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. 
- channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_security_policies.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_security_policies.py index 96c20c1b455d..d4bfa53e0905 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_security_policies.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_security_policies.py @@ -320,85 +320,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (SecurityPoliciesClient, transports.SecurityPoliciesRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. 
- assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. 
- # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ @@ -6024,7 +5945,17 @@ def test_insert_rest_call_success(request_type): "auto_deploy_expiration_sec": 2785, "auto_deploy_impacted_baseline_threshold": 0.4121, "auto_deploy_load_threshold": 0.2768, + "detection_absolute_qps": 0.23520000000000002, + "detection_load_threshold": 0.2538, + "detection_relative_to_baseline_qps": 0.36010000000000003, "name": "name_value", + "traffic_granularity_configs": [ + { + "enable_each_unique_value": True, + "type_": "type__value", + "value": "value_value", + } + ], } ], } @@ -6657,7 +6588,17 @@ def test_patch_rest_call_success(request_type): "auto_deploy_expiration_sec": 2785, "auto_deploy_impacted_baseline_threshold": 0.4121, "auto_deploy_load_threshold": 0.2768, + "detection_absolute_qps": 0.23520000000000002, + "detection_load_threshold": 0.2538, + "detection_relative_to_baseline_qps": 0.36010000000000003, "name": "name_value", + "traffic_granularity_configs": [ + { + 
"enable_each_unique_value": True, + "type_": "type__value", + "value": "value_value", + } + ], } ], } diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_service_attachments.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_service_attachments.py index 04bc32ebb9f2..54da68c0eaf3 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_service_attachments.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_service_attachments.py @@ -332,85 +332,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (ServiceAttachmentsClient, transports.ServiceAttachmentsRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. 
- channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ @@ -4102,6 +4023,7 @@ def test_get_rest_call_success(request_type): name="name_value", nat_subnets=["nat_subnets_value"], producer_forwarding_rule="producer_forwarding_rule_value", + propagated_connection_limit=2868, reconcile_connections=True, region="region_value", self_link="self_link_value", @@ -4133,6 +4055,7 @@ def test_get_rest_call_success(request_type): assert response.name == "name_value" assert response.nat_subnets == ["nat_subnets_value"] assert response.producer_forwarding_rule == "producer_forwarding_rule_value" + assert response.propagated_connection_limit == 2868 assert response.reconcile_connections is True assert response.region == "region_value" assert response.self_link == "self_link_value" @@ -4359,6 +4282,7 @@ def test_insert_rest_call_success(request_type): { "consumer_network": "consumer_network_value", "endpoint": "endpoint_value", + "propagated_connection_count": 2878, "psc_connection_id": 1793, "status": "status_value", } @@ -4385,6 +4309,7 @@ def test_insert_rest_call_success(request_type): "name": "name_value", 
"nat_subnets": ["nat_subnets_value1", "nat_subnets_value2"], "producer_forwarding_rule": "producer_forwarding_rule_value", + "propagated_connection_limit": 2868, "psc_service_attachment_id": {"high": 416, "low": 338}, "reconcile_connections": True, "region": "region_value", @@ -4761,6 +4686,7 @@ def test_patch_rest_call_success(request_type): { "consumer_network": "consumer_network_value", "endpoint": "endpoint_value", + "propagated_connection_count": 2878, "psc_connection_id": 1793, "status": "status_value", } @@ -4787,6 +4713,7 @@ def test_patch_rest_call_success(request_type): "name": "name_value", "nat_subnets": ["nat_subnets_value1", "nat_subnets_value2"], "producer_forwarding_rule": "producer_forwarding_rule_value", + "propagated_connection_limit": 2868, "psc_service_attachment_id": {"high": 416, "low": 338}, "reconcile_connections": True, "region": "region_value", @@ -5065,42 +4992,6 @@ def test_set_iam_policy_rest_call_success(request_type): "bindings": {}, "etag": "etag_value", "iam_owned": True, - "rules": [ - { - "action": "action_value", - "conditions": [ - { - "iam": "iam_value", - "op": "op_value", - "svc": "svc_value", - "sys": "sys_value", - "values": ["values_value1", "values_value2"], - } - ], - "description": "description_value", - "ins": ["ins_value1", "ins_value2"], - "log_configs": [ - { - "cloud_audit": { - "authorization_logging_options": { - "permission_type": "permission_type_value" - }, - "log_name": "log_name_value", - }, - "counter": { - "custom_fields": [ - {"name": "name_value", "value": "value_value"} - ], - "field": "field_value", - "metric": "metric_value", - }, - "data_access": {"log_mode": "log_mode_value"}, - } - ], - "not_ins": ["not_ins_value1", "not_ins_value2"], - "permissions": ["permissions_value1", "permissions_value2"], - } - ], "version": 774, }, } diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_snapshot_settings_service.py 
b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_snapshot_settings_service.py index e880942cd5ff..faf681c86755 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_snapshot_settings_service.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_snapshot_settings_service.py @@ -335,89 +335,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - ( - SnapshotSettingsServiceClient, - transports.SnapshotSettingsServiceRestTransport, - "rest", - ), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. 
- channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_snapshots.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_snapshots.py index 8955787274f3..6d6eec94a0f4 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_snapshots.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_snapshots.py @@ -294,85 +294,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (SnapshotsClient, transports.SnapshotsRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. 
- assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. 
- # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ @@ -4161,42 +4082,6 @@ def test_set_iam_policy_rest_call_success(request_type): "bindings": {}, "etag": "etag_value", "iam_owned": True, - "rules": [ - { - "action": "action_value", - "conditions": [ - { - "iam": "iam_value", - "op": "op_value", - "svc": "svc_value", - "sys": "sys_value", - "values": ["values_value1", "values_value2"], - } - ], - "description": "description_value", - "ins": ["ins_value1", "ins_value2"], - "log_configs": [ - { - "cloud_audit": { - "authorization_logging_options": { - "permission_type": "permission_type_value" - }, - "log_name": "log_name_value", - }, - "counter": { - "custom_fields": [ - {"name": "name_value", "value": "value_value"} - ], - "field": "field_value", - "metric": "metric_value", - }, - "data_access": {"log_mode": "log_mode_value"}, - } - ], - "not_ins": ["not_ins_value1", "not_ins_value2"], - "permissions": ["permissions_value1", "permissions_value2"], - } - ], "version": 
774, }, } diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_ssl_certificates.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_ssl_certificates.py index d5872d0205f5..7e1b6b8982b8 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_ssl_certificates.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_ssl_certificates.py @@ -319,85 +319,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (SslCertificatesClient, transports.SslCertificatesRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. 
- channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_ssl_policies.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_ssl_policies.py index dbc44d828869..5d359a75d47c 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_ssl_policies.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_ssl_policies.py @@ -300,85 +300,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (SslPoliciesClient, transports.SslPoliciesRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. 
- assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. 
- # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_storage_pool_types.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_storage_pool_types.py index 64c2535f704d..93d856c66009 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_storage_pool_types.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_storage_pool_types.py @@ -313,85 +313,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
-@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (StoragePoolTypesClient, transports.StoragePoolTypesRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. 
- client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
- ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_storage_pools.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_storage_pools.py index 7e60cfcfac79..3f5d68382965 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_storage_pools.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_storage_pools.py @@ -304,85 +304,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (StoragePoolsClient, transports.StoragePoolsRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. 
- channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ @@ -5113,42 +5034,6 @@ def test_set_iam_policy_rest_call_success(request_type): "bindings": {}, "etag": "etag_value", "iam_owned": True, - "rules": [ - { - "action": "action_value", - "conditions": [ - { - "iam": "iam_value", - "op": "op_value", - "svc": "svc_value", - "sys": "sys_value", - "values": ["values_value1", "values_value2"], - } - ], - "description": "description_value", - "ins": ["ins_value1", "ins_value2"], - "log_configs": [ - { - "cloud_audit": { - "authorization_logging_options": { - "permission_type": "permission_type_value" - }, - "log_name": "log_name_value", - }, - "counter": { - "custom_fields": [ - {"name": "name_value", "value": "value_value"} - ], - "field": "field_value", - "metric": "metric_value", - }, - "data_access": {"log_mode": "log_mode_value"}, - } - ], - "not_ins": ["not_ins_value1", "not_ins_value2"], - "permissions": ["permissions_value1", "permissions_value2"], - } - ], "version": 774, }, } diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_subnetworks.py 
b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_subnetworks.py index 9c76455c444b..f95e007f6dff 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_subnetworks.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_subnetworks.py @@ -300,85 +300,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (SubnetworksClient, transports.SubnetworksRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. 
- google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
- ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ @@ -6481,42 +6402,6 @@ def test_set_iam_policy_rest_call_success(request_type): "bindings": {}, "etag": "etag_value", "iam_owned": True, - "rules": [ - { - "action": "action_value", - "conditions": [ - { - "iam": "iam_value", - "op": "op_value", - "svc": "svc_value", - "sys": "sys_value", - "values": ["values_value1", "values_value2"], - } - ], - "description": "description_value", - "ins": ["ins_value1", "ins_value2"], - "log_configs": [ - { - "cloud_audit": { - "authorization_logging_options": { - "permission_type": "permission_type_value" - }, - "log_name": "log_name_value", - }, - "counter": { - "custom_fields": [ - {"name": "name_value", "value": "value_value"} - ], - "field": "field_value", - "metric": "metric_value", - }, - "data_access": {"log_mode": "log_mode_value"}, - } - ], - "not_ins": ["not_ins_value1", "not_ins_value2"], - "permissions": ["permissions_value1", "permissions_value2"], - } - ], "version": 774, }, } diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_grpc_proxies.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_grpc_proxies.py index ad66e6310c6c..f06801e20255 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_grpc_proxies.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_grpc_proxies.py @@ -328,85 +328,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
-@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (TargetGrpcProxiesClient, transports.TargetGrpcProxiesRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. 
- client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
- ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_http_proxies.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_http_proxies.py index 28dc281a14f0..ff2ad804d063 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_http_proxies.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_http_proxies.py @@ -328,85 +328,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (TargetHttpProxiesClient, transports.TargetHttpProxiesRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. 
- channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_https_proxies.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_https_proxies.py index bfdffab74960..1f94a24ff001 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_https_proxies.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_https_proxies.py @@ -332,85 +332,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (TargetHttpsProxiesClient, transports.TargetHttpsProxiesRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. 
- assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. 
- # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ @@ -5405,6 +5326,7 @@ def test_get_rest_call_success(request_type): server_tls_policy="server_tls_policy_value", ssl_certificates=["ssl_certificates_value"], ssl_policy="ssl_policy_value", + tls_early_data="tls_early_data_value", url_map="url_map_value", ) @@ -5437,6 +5359,7 @@ def test_get_rest_call_success(request_type): assert response.server_tls_policy == "server_tls_policy_value" assert response.ssl_certificates == ["ssl_certificates_value"] assert response.ssl_policy == "ssl_policy_value" + assert response.tls_early_data == "tls_early_data_value" assert response.url_map == "url_map_value" @@ -5549,6 +5472,7 @@ def test_insert_rest_call_success(request_type): "server_tls_policy": "server_tls_policy_value", "ssl_certificates": ["ssl_certificates_value1", "ssl_certificates_value2"], "ssl_policy": "ssl_policy_value", + "tls_early_data": "tls_early_data_value", "url_map": "url_map_value", } # The version of a generated 
dependency at test runtime may differ from the version used during generation. @@ -5925,6 +5849,7 @@ def test_patch_rest_call_success(request_type): "server_tls_policy": "server_tls_policy_value", "ssl_certificates": ["ssl_certificates_value1", "ssl_certificates_value2"], "ssl_policy": "ssl_policy_value", + "tls_early_data": "tls_early_data_value", "url_map": "url_map_value", } # The version of a generated dependency at test runtime may differ from the version used during generation. diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_instances.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_instances.py index 324b8018b35c..b39a05dd8014 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_instances.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_instances.py @@ -319,85 +319,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (TargetInstancesClient, transports.TargetInstancesRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. 
a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_pools.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_pools.py index 107f272c3e87..00e218974820 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_pools.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_pools.py @@ -300,85 +300,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (TargetPoolsClient, transports.TargetPoolsRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. 
- assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. 
- # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_ssl_proxies.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_ssl_proxies.py index 8965059f00a7..d924309b2a3e 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_ssl_proxies.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_ssl_proxies.py @@ -320,85 +320,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
-@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (TargetSslProxiesClient, transports.TargetSslProxiesRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. 
- client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
- ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_tcp_proxies.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_tcp_proxies.py index 5dc93493a26b..49c04c4d54ec 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_tcp_proxies.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_tcp_proxies.py @@ -320,85 +320,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (TargetTcpProxiesClient, transports.TargetTcpProxiesRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. 
- channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_vpn_gateways.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_vpn_gateways.py index 8e46c247261d..82438edf225b 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_vpn_gateways.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_vpn_gateways.py @@ -328,85 +328,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (TargetVpnGatewaysClient, transports.TargetVpnGatewaysRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. 
- assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. 
- # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_url_maps.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_url_maps.py index 07196e0bded2..2d63f5c2c35c 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_url_maps.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_url_maps.py @@ -280,85 +280,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
-@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (UrlMapsClient, transports.UrlMapsRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. 
- client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
- ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ @@ -4375,6 +4296,19 @@ def test_insert_rest_call_success(request_type): request_init = {"project": "sample1"} request_init["url_map_resource"] = { "creation_timestamp": "creation_timestamp_value", + "default_custom_error_response_policy": { + "error_response_rules": [ + { + "match_response_codes": [ + "match_response_codes_value1", + "match_response_codes_value2", + ], + "override_response_code": 2344, + "path": "path_value", + } + ], + "error_service": "error_service_value", + }, "default_route_action": { "cors_policy": { "allow_credentials": True, @@ -4461,6 +4395,7 @@ def test_insert_rest_call_success(request_type): "name": "name_value", "path_matchers": [ { + "default_custom_error_response_policy": {}, "default_route_action": {}, "default_service": "default_service_value", "default_url_redirect": {}, @@ -4469,6 +4404,7 @@ def test_insert_rest_call_success(request_type): "name": "name_value", "path_rules": [ { + "custom_error_response_policy": {}, "paths": ["paths_value1", "paths_value2"], "route_action": {}, "service": "service_value", @@ -4477,6 +4413,7 @@ def test_insert_rest_call_success(request_type): ], "route_rules": [ { + "custom_error_response_policy": {}, "description": "description_value", "header_action": {}, "match_rules": [ @@ -5125,6 +5062,19 @@ def test_patch_rest_call_success(request_type): request_init = {"project": "sample1", "url_map": "sample2"} request_init["url_map_resource"] = { "creation_timestamp": "creation_timestamp_value", + "default_custom_error_response_policy": { + "error_response_rules": [ + { + "match_response_codes": [ + "match_response_codes_value1", + "match_response_codes_value2", + ], + "override_response_code": 2344, + "path": "path_value", + } + ], + "error_service": 
"error_service_value", + }, "default_route_action": { "cors_policy": { "allow_credentials": True, @@ -5211,6 +5161,7 @@ def test_patch_rest_call_success(request_type): "name": "name_value", "path_matchers": [ { + "default_custom_error_response_policy": {}, "default_route_action": {}, "default_service": "default_service_value", "default_url_redirect": {}, @@ -5219,6 +5170,7 @@ def test_patch_rest_call_success(request_type): "name": "name_value", "path_rules": [ { + "custom_error_response_policy": {}, "paths": ["paths_value1", "paths_value2"], "route_action": {}, "service": "service_value", @@ -5227,6 +5179,7 @@ def test_patch_rest_call_success(request_type): ], "route_rules": [ { + "custom_error_response_policy": {}, "description": "description_value", "header_action": {}, "match_rules": [ @@ -5518,6 +5471,19 @@ def test_update_rest_call_success(request_type): request_init = {"project": "sample1", "url_map": "sample2"} request_init["url_map_resource"] = { "creation_timestamp": "creation_timestamp_value", + "default_custom_error_response_policy": { + "error_response_rules": [ + { + "match_response_codes": [ + "match_response_codes_value1", + "match_response_codes_value2", + ], + "override_response_code": 2344, + "path": "path_value", + } + ], + "error_service": "error_service_value", + }, "default_route_action": { "cors_policy": { "allow_credentials": True, @@ -5604,6 +5570,7 @@ def test_update_rest_call_success(request_type): "name": "name_value", "path_matchers": [ { + "default_custom_error_response_policy": {}, "default_route_action": {}, "default_service": "default_service_value", "default_url_redirect": {}, @@ -5612,6 +5579,7 @@ def test_update_rest_call_success(request_type): "name": "name_value", "path_rules": [ { + "custom_error_response_policy": {}, "paths": ["paths_value1", "paths_value2"], "route_action": {}, "service": "service_value", @@ -5620,6 +5588,7 @@ def test_update_rest_call_success(request_type): ], "route_rules": [ { + 
"custom_error_response_policy": {}, "description": "description_value", "header_action": {}, "match_rules": [ @@ -5916,6 +5885,19 @@ def test_validate_rest_call_success(request_type): ], "resource": { "creation_timestamp": "creation_timestamp_value", + "default_custom_error_response_policy": { + "error_response_rules": [ + { + "match_response_codes": [ + "match_response_codes_value1", + "match_response_codes_value2", + ], + "override_response_code": 2344, + "path": "path_value", + } + ], + "error_service": "error_service_value", + }, "default_route_action": { "cors_policy": { "allow_credentials": True, @@ -6005,6 +5987,7 @@ def test_validate_rest_call_success(request_type): "name": "name_value", "path_matchers": [ { + "default_custom_error_response_policy": {}, "default_route_action": {}, "default_service": "default_service_value", "default_url_redirect": {}, @@ -6013,6 +5996,7 @@ def test_validate_rest_call_success(request_type): "name": "name_value", "path_rules": [ { + "custom_error_response_policy": {}, "paths": ["paths_value1", "paths_value2"], "route_action": {}, "service": "service_value", @@ -6021,6 +6005,7 @@ def test_validate_rest_call_success(request_type): ], "route_rules": [ { + "custom_error_response_policy": {}, "description": "description_value", "header_action": {}, "match_rules": [ diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_vpn_gateways.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_vpn_gateways.py index 398fb9fa1d55..0f396a713d71 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_vpn_gateways.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_vpn_gateways.py @@ -300,85 +300,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
-@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (VpnGatewaysClient, transports.VpnGatewaysRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. 
- client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
- ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_vpn_tunnels.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_vpn_tunnels.py index 6cacf168c1ca..a458b42d1c40 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_vpn_tunnels.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_vpn_tunnels.py @@ -296,85 +296,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (VpnTunnelsClient, transports.VpnTunnelsRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. 
- channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_zone_operations.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_zone_operations.py index aa550923abd7..9b80960f5c65 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_zone_operations.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_zone_operations.py @@ -312,85 +312,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (ZoneOperationsClient, transports.ZoneOperationsRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. 
- assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. 
- # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_zones.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_zones.py index b2e8b8288236..66afb9cfbc46 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_zones.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_zones.py @@ -268,85 +268,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (ZonesClient, transports.ZonesRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. 
- assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. 
- # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-confidentialcomputing/CHANGELOG.md b/packages/google-cloud-confidentialcomputing/CHANGELOG.md index 6a5f1a01138a..8735e8541f1a 100644 --- a/packages/google-cloud-confidentialcomputing/CHANGELOG.md +++ b/packages/google-cloud-confidentialcomputing/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [0.4.13](https://github.com/googleapis/google-cloud-python/compare/google-cloud-confidentialcomputing-v0.4.12...google-cloud-confidentialcomputing-v0.4.13) (2024-10-31) + + +### Bug Fixes + +* disable universe-domain validation ([53c951e](https://github.com/googleapis/google-cloud-python/commit/53c951e90ad1d702fa507495532086d5d2f6b3c0)) + ## [0.4.12](https://github.com/googleapis/google-cloud-python/compare/google-cloud-confidentialcomputing-v0.4.11...google-cloud-confidentialcomputing-v0.4.12) (2024-10-24) diff --git 
a/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing/gapic_version.py b/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing/gapic_version.py index db2ef16a95a4..9b19e5f10e00 100644 --- a/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing/gapic_version.py +++ b/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.4.12" # {x-release-please-version} +__version__ = "0.4.13" # {x-release-please-version} diff --git a/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing_v1/gapic_version.py b/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing_v1/gapic_version.py index db2ef16a95a4..9b19e5f10e00 100644 --- a/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing_v1/gapic_version.py +++ b/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.4.12" # {x-release-please-version} +__version__ = "0.4.13" # {x-release-please-version} diff --git a/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing_v1/services/confidential_computing/client.py b/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing_v1/services/confidential_computing/client.py index 1ea039704a5f..e2541c1ea369 100644 --- a/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing_v1/services/confidential_computing/client.py +++ b/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing_v1/services/confidential_computing/client.py @@ -466,36 +466,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = ConfidentialComputingClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. 
@@ -505,13 +475,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or ConfidentialComputingClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-confidentialcomputing/samples/generated_samples/snippet_metadata_google.cloud.confidentialcomputing.v1.json b/packages/google-cloud-confidentialcomputing/samples/generated_samples/snippet_metadata_google.cloud.confidentialcomputing.v1.json index 01320d796a52..75ee32ba28cb 100644 --- a/packages/google-cloud-confidentialcomputing/samples/generated_samples/snippet_metadata_google.cloud.confidentialcomputing.v1.json +++ b/packages/google-cloud-confidentialcomputing/samples/generated_samples/snippet_metadata_google.cloud.confidentialcomputing.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-confidentialcomputing", - "version": "0.4.12" + "version": "0.4.13" }, "snippets": [ { diff --git a/packages/google-cloud-confidentialcomputing/tests/unit/gapic/confidentialcomputing_v1/test_confidential_computing.py b/packages/google-cloud-confidentialcomputing/tests/unit/gapic/confidentialcomputing_v1/test_confidential_computing.py index 95dc55470180..b3e488623d27 100644 --- a/packages/google-cloud-confidentialcomputing/tests/unit/gapic/confidentialcomputing_v1/test_confidential_computing.py +++ b/packages/google-cloud-confidentialcomputing/tests/unit/gapic/confidentialcomputing_v1/test_confidential_computing.py @@ -337,94 +337,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
-@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - ( - ConfidentialComputingClient, - transports.ConfidentialComputingGrpcTransport, - "grpc", - ), - ( - ConfidentialComputingClient, - transports.ConfidentialComputingRestTransport, - "rest", - ), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. 
- google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
- ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-config/CHANGELOG.md b/packages/google-cloud-config/CHANGELOG.md index d25be5522426..02e15562c627 100644 --- a/packages/google-cloud-config/CHANGELOG.md +++ b/packages/google-cloud-config/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [0.1.13](https://github.com/googleapis/google-cloud-python/compare/google-cloud-config-v0.1.12...google-cloud-config-v0.1.13) (2024-10-31) + + +### Bug Fixes + +* disable universe-domain validation ([53c951e](https://github.com/googleapis/google-cloud-python/commit/53c951e90ad1d702fa507495532086d5d2f6b3c0)) + ## [0.1.12](https://github.com/googleapis/google-cloud-python/compare/google-cloud-config-v0.1.11...google-cloud-config-v0.1.12) (2024-10-24) diff --git a/packages/google-cloud-config/google/cloud/config/gapic_version.py b/packages/google-cloud-config/google/cloud/config/gapic_version.py index 17bbab4c1877..7daf9a1dd221 100644 --- a/packages/google-cloud-config/google/cloud/config/gapic_version.py +++ b/packages/google-cloud-config/google/cloud/config/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.1.12" # {x-release-please-version} +__version__ = "0.1.13" # {x-release-please-version} diff --git a/packages/google-cloud-config/google/cloud/config_v1/gapic_version.py b/packages/google-cloud-config/google/cloud/config_v1/gapic_version.py index 17bbab4c1877..7daf9a1dd221 100644 --- a/packages/google-cloud-config/google/cloud/config_v1/gapic_version.py +++ b/packages/google-cloud-config/google/cloud/config_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.12" # {x-release-please-version} +__version__ = "0.1.13" # {x-release-please-version} diff --git a/packages/google-cloud-config/google/cloud/config_v1/services/config/client.py b/packages/google-cloud-config/google/cloud/config_v1/services/config/client.py index 2d5959bd7a30..87fb2897e6c5 100644 --- a/packages/google-cloud-config/google/cloud/config_v1/services/config/client.py +++ b/packages/google-cloud-config/google/cloud/config_v1/services/config/client.py @@ -612,36 +612,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. 
- """ - - default_universe = ConfigClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -651,13 +621,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or ConfigClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. 
+ return True @property def api_endpoint(self): diff --git a/packages/google-cloud-config/samples/generated_samples/snippet_metadata_google.cloud.config.v1.json b/packages/google-cloud-config/samples/generated_samples/snippet_metadata_google.cloud.config.v1.json index 5b593664167e..b930b4cd32a8 100644 --- a/packages/google-cloud-config/samples/generated_samples/snippet_metadata_google.cloud.config.v1.json +++ b/packages/google-cloud-config/samples/generated_samples/snippet_metadata_google.cloud.config.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-config", - "version": "0.1.12" + "version": "0.1.13" }, "snippets": [ { diff --git a/packages/google-cloud-config/tests/unit/gapic/config_v1/test_config.py b/packages/google-cloud-config/tests/unit/gapic/config_v1/test_config.py index 60fb76c763bc..aca6fcb728b7 100644 --- a/packages/google-cloud-config/tests/unit/gapic/config_v1/test_config.py +++ b/packages/google-cloud-config/tests/unit/gapic/config_v1/test_config.py @@ -301,86 +301,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (ConfigClient, transports.ConfigGrpcTransport, "grpc"), - (ConfigClient, transports.ConfigRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. 
- channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-contact-center-insights/CHANGELOG.md b/packages/google-cloud-contact-center-insights/CHANGELOG.md index ddb7f228b6fa..a3d9bb516036 100644 --- a/packages/google-cloud-contact-center-insights/CHANGELOG.md +++ b/packages/google-cloud-contact-center-insights/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [1.19.1](https://github.com/googleapis/google-cloud-python/compare/google-cloud-contact-center-insights-v1.19.0...google-cloud-contact-center-insights-v1.19.1) (2024-10-31) + + +### Bug Fixes + +* disable universe-domain validation ([53c951e](https://github.com/googleapis/google-cloud-python/commit/53c951e90ad1d702fa507495532086d5d2f6b3c0)) + ## [1.19.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-contact-center-insights-v1.18.0...google-cloud-contact-center-insights-v1.19.0) (2024-10-24) diff --git a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights/gapic_version.py 
b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights/gapic_version.py index f1337c609ff8..68899264edad 100644 --- a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights/gapic_version.py +++ b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.19.0" # {x-release-please-version} +__version__ = "1.19.1" # {x-release-please-version} diff --git a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/gapic_version.py b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/gapic_version.py index f1337c609ff8..68899264edad 100644 --- a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/gapic_version.py +++ b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.19.0" # {x-release-please-version} +__version__ = "1.19.1" # {x-release-please-version} diff --git a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/services/contact_center_insights/client.py b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/services/contact_center_insights/client.py index b9ab0723ffe0..084d0e6239c6 100644 --- a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/services/contact_center_insights/client.py +++ b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/services/contact_center_insights/client.py @@ -701,36 +701,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = ContactCenterInsightsClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. 
@@ -740,13 +710,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or ContactCenterInsightsClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-contact-center-insights/samples/generated_samples/snippet_metadata_google.cloud.contactcenterinsights.v1.json b/packages/google-cloud-contact-center-insights/samples/generated_samples/snippet_metadata_google.cloud.contactcenterinsights.v1.json index 603c1afb27d7..90d789df11c2 100644 --- a/packages/google-cloud-contact-center-insights/samples/generated_samples/snippet_metadata_google.cloud.contactcenterinsights.v1.json +++ b/packages/google-cloud-contact-center-insights/samples/generated_samples/snippet_metadata_google.cloud.contactcenterinsights.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-contact-center-insights", - "version": "1.19.0" + "version": "1.19.1" }, "snippets": [ { diff --git a/packages/google-cloud-contact-center-insights/tests/unit/gapic/contact_center_insights_v1/test_contact_center_insights.py b/packages/google-cloud-contact-center-insights/tests/unit/gapic/contact_center_insights_v1/test_contact_center_insights.py index 8356d8f5c948..bf8f9a66c041 100644 --- a/packages/google-cloud-contact-center-insights/tests/unit/gapic/contact_center_insights_v1/test_contact_center_insights.py +++ b/packages/google-cloud-contact-center-insights/tests/unit/gapic/contact_center_insights_v1/test_contact_center_insights.py @@ -352,94 +352,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
-@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - ( - ContactCenterInsightsClient, - transports.ContactCenterInsightsGrpcTransport, - "grpc", - ), - ( - ContactCenterInsightsClient, - transports.ContactCenterInsightsRestTransport, - "rest", - ), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. 
- google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
- ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-container/CHANGELOG.md b/packages/google-cloud-container/CHANGELOG.md index 33a90e7f4cb6..8df7eb3fe86c 100644 --- a/packages/google-cloud-container/CHANGELOG.md +++ b/packages/google-cloud-container/CHANGELOG.md @@ -4,6 +4,28 @@ [1]: https://pypi.org/project/google-cloud-container/#history +## [2.54.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-container-v2.53.0...google-cloud-container-v2.54.0) (2024-11-11) + + +### Features + +* add desired_enterprise_config,desired_node_pool_auto_config_linux_node_config to ClusterUpdate. ([4fdf249](https://github.com/googleapis/google-cloud-python/commit/4fdf24960b3966193516d6f16900df1409165376)) +* add desired_tier to EnterpriseConfig. 
([4fdf249](https://github.com/googleapis/google-cloud-python/commit/4fdf24960b3966193516d6f16900df1409165376)) +* add DesiredEnterpriseConfig proto message ([4fdf249](https://github.com/googleapis/google-cloud-python/commit/4fdf24960b3966193516d6f16900df1409165376)) +* add LinuxNodeConfig in NodePoolAutoConfig ([4fdf249](https://github.com/googleapis/google-cloud-python/commit/4fdf24960b3966193516d6f16900df1409165376)) +* add LocalSsdEncryptionMode in NodeConfig ([4fdf249](https://github.com/googleapis/google-cloud-python/commit/4fdf24960b3966193516d6f16900df1409165376)) +* add UpgradeInfoEvent proto message ([4fdf249](https://github.com/googleapis/google-cloud-python/commit/4fdf24960b3966193516d6f16900df1409165376)) + + +### Bug Fixes + +* disable universe-domain validation ([#13242](https://github.com/googleapis/google-cloud-python/issues/13242)) ([b479ff8](https://github.com/googleapis/google-cloud-python/commit/b479ff841ed93a18393a188ee1d72edf9fb729ec)) + + +### Documentation + +* Minor documentation updates ([4fdf249](https://github.com/googleapis/google-cloud-python/commit/4fdf24960b3966193516d6f16900df1409165376)) + ## [2.53.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-container-v2.52.0...google-cloud-container-v2.53.0) (2024-10-24) diff --git a/packages/google-cloud-container/google/cloud/container/__init__.py b/packages/google-cloud-container/google/cloud/container/__init__.py index 9127673766b0..028dbd9952c0 100644 --- a/packages/google-cloud-container/google/cloud/container/__init__.py +++ b/packages/google-cloud-container/google/cloud/container/__init__.py @@ -64,6 +64,7 @@ DefaultSnatStatus, DeleteClusterRequest, DeleteNodePoolRequest, + DesiredEnterpriseConfig, DnsCacheConfig, DNSConfig, EnterpriseConfig, @@ -193,6 +194,7 @@ 
UpdateNodePoolRequest, UpgradeAvailableEvent, UpgradeEvent, + UpgradeInfoEvent, UpgradeResourceType, UsableSubnetwork, UsableSubnetworkSecondaryRange, @@ -246,6 +248,7 @@ "DefaultSnatStatus", "DeleteClusterRequest", "DeleteNodePoolRequest", + "DesiredEnterpriseConfig", "DnsCacheConfig", "DNSConfig", "EnterpriseConfig", @@ -370,6 +373,7 @@ "UpdateNodePoolRequest", "UpgradeAvailableEvent", "UpgradeEvent", + "UpgradeInfoEvent", "UsableSubnetwork", "UsableSubnetworkSecondaryRange", "UserManagedKeysConfig", diff --git a/packages/google-cloud-container/google/cloud/container/gapic_version.py b/packages/google-cloud-container/google/cloud/container/gapic_version.py index 1fddec69a0c4..a754aa189117 100644 --- a/packages/google-cloud-container/google/cloud/container/gapic_version.py +++ b/packages/google-cloud-container/google/cloud/container/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.53.0" # {x-release-please-version} +__version__ = "2.54.0" # {x-release-please-version} diff --git a/packages/google-cloud-container/google/cloud/container_v1/__init__.py b/packages/google-cloud-container/google/cloud/container_v1/__init__.py index d267c47807c8..37aafd0523ff 100644 --- a/packages/google-cloud-container/google/cloud/container_v1/__init__.py +++ b/packages/google-cloud-container/google/cloud/container_v1/__init__.py @@ -59,6 +59,7 @@ DefaultSnatStatus, DeleteClusterRequest, DeleteNodePoolRequest, + DesiredEnterpriseConfig, DnsCacheConfig, DNSConfig, EnterpriseConfig, @@ -188,6 +189,7 @@ UpdateNodePoolRequest, UpgradeAvailableEvent, UpgradeEvent, + UpgradeInfoEvent, UpgradeResourceType, UsableSubnetwork, UsableSubnetworkSecondaryRange, @@ -243,6 +245,7 @@ "DefaultSnatStatus", "DeleteClusterRequest", "DeleteNodePoolRequest", + "DesiredEnterpriseConfig", "DnsCacheConfig", "EnterpriseConfig", "EphemeralStorageLocalSsdConfig", @@ -371,6 +374,7 @@ "UpdateNodePoolRequest", 
"UpgradeAvailableEvent", "UpgradeEvent", + "UpgradeInfoEvent", "UpgradeResourceType", "UsableSubnetwork", "UsableSubnetworkSecondaryRange", diff --git a/packages/google-cloud-container/google/cloud/container_v1/gapic_version.py b/packages/google-cloud-container/google/cloud/container_v1/gapic_version.py index 1fddec69a0c4..a754aa189117 100644 --- a/packages/google-cloud-container/google/cloud/container_v1/gapic_version.py +++ b/packages/google-cloud-container/google/cloud/container_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.53.0" # {x-release-please-version} +__version__ = "2.54.0" # {x-release-please-version} diff --git a/packages/google-cloud-container/google/cloud/container_v1/services/cluster_manager/client.py b/packages/google-cloud-container/google/cloud/container_v1/services/cluster_manager/client.py index 75c0a1e5b254..9e5cd6cd33d3 100644 --- a/packages/google-cloud-container/google/cloud/container_v1/services/cluster_manager/client.py +++ b/packages/google-cloud-container/google/cloud/container_v1/services/cluster_manager/client.py @@ -506,36 +506,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. 
- """ - - default_universe = ClusterManagerClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -545,13 +515,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or ClusterManagerClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. 
+ return True @property def api_endpoint(self): diff --git a/packages/google-cloud-container/google/cloud/container_v1/types/__init__.py b/packages/google-cloud-container/google/cloud/container_v1/types/__init__.py index a95641b09403..7750e2edf29e 100644 --- a/packages/google-cloud-container/google/cloud/container_v1/types/__init__.py +++ b/packages/google-cloud-container/google/cloud/container_v1/types/__init__.py @@ -53,6 +53,7 @@ DefaultSnatStatus, DeleteClusterRequest, DeleteNodePoolRequest, + DesiredEnterpriseConfig, DnsCacheConfig, DNSConfig, EnterpriseConfig, @@ -182,6 +183,7 @@ UpdateNodePoolRequest, UpgradeAvailableEvent, UpgradeEvent, + UpgradeInfoEvent, UpgradeResourceType, UsableSubnetwork, UsableSubnetworkSecondaryRange, @@ -233,6 +235,7 @@ "DefaultSnatStatus", "DeleteClusterRequest", "DeleteNodePoolRequest", + "DesiredEnterpriseConfig", "DnsCacheConfig", "DNSConfig", "EnterpriseConfig", @@ -357,6 +360,7 @@ "UpdateNodePoolRequest", "UpgradeAvailableEvent", "UpgradeEvent", + "UpgradeInfoEvent", "UsableSubnetwork", "UsableSubnetworkSecondaryRange", "UserManagedKeysConfig", diff --git a/packages/google-cloud-container/google/cloud/container_v1/types/cluster_service.py b/packages/google-cloud-container/google/cloud/container_v1/types/cluster_service.py index 6cb514db5037..7e4d3f93d382 100644 --- a/packages/google-cloud-container/google/cloud/container_v1/types/cluster_service.py +++ b/packages/google-cloud-container/google/cloud/container_v1/types/cluster_service.py @@ -91,6 +91,7 @@ "ClusterUpdate", "AdditionalPodRangesConfig", "RangeInfo", + "DesiredEnterpriseConfig", "Operation", "OperationProgress", "CreateClusterRequest", @@ -182,6 +183,7 @@ "NotificationConfig", "ConfidentialNodes", "UpgradeEvent", + "UpgradeInfoEvent", "UpgradeAvailableEvent", "SecurityBulletinEvent", "Autopilot", @@ -359,7 +361,8 @@ class LinuxNodeConfig(proto.Message): net.core.netdev_max_backlog net.core.rmem_max net.core.wmem_default net.core.wmem_max net.core.optmem_max 
net.core.somaxconn net.ipv4.tcp_rmem net.ipv4.tcp_wmem - net.ipv4.tcp_tw_reuse + net.ipv4.tcp_tw_reuse kernel.shmmni kernel.shmmax + kernel.shmall cgroup_mode (google.cloud.container_v1.types.LinuxNodeConfig.CgroupMode): cgroup_mode specifies the cgroup mode to be used on the node. @@ -780,6 +783,11 @@ class NodeConfig(proto.Message): Secondary boot disk update strategy. This field is a member of `oneof`_ ``_secondary_boot_disk_update_strategy``. + local_ssd_encryption_mode (google.cloud.container_v1.types.NodeConfig.LocalSsdEncryptionMode): + Specifies which method should be used for + encrypting the Local SSDs attahced to the node. + + This field is a member of `oneof`_ ``_local_ssd_encryption_mode``. effective_cgroup_mode (google.cloud.container_v1.types.NodeConfig.EffectiveCgroupMode): Output only. effective_cgroup_mode is the cgroup mode actually used by the node pool. It is determined by the @@ -787,6 +795,29 @@ class NodeConfig(proto.Message): cgroup mode based on the cluster creation version. """ + class LocalSsdEncryptionMode(proto.Enum): + r"""LocalSsdEncryptionMode specifies the method used for + encrypting the Local SSDs attached to the node. + + Values: + LOCAL_SSD_ENCRYPTION_MODE_UNSPECIFIED (0): + The given node will be encrypted using keys + managed by Google infrastructure and the keys + will be deleted when the node is deleted. + STANDARD_ENCRYPTION (1): + The given node will be encrypted using keys + managed by Google infrastructure and the keys + will be deleted when the node is deleted. + EPHEMERAL_KEY_ENCRYPTION (2): + The given node will opt-in for using + ephemeral key for encryption of Local SSDs. + The Local SSDs will not be able to recover data + in case of node crash. + """ + LOCAL_SSD_ENCRYPTION_MODE_UNSPECIFIED = 0 + STANDARD_ENCRYPTION = 1 + EPHEMERAL_KEY_ENCRYPTION = 2 + class EffectiveCgroupMode(proto.Enum): r"""Possible effective cgroup modes for the node. 
@@ -995,6 +1026,12 @@ class EffectiveCgroupMode(proto.Enum): message="SecondaryBootDiskUpdateStrategy", ) ) + local_ssd_encryption_mode: LocalSsdEncryptionMode = proto.Field( + proto.ENUM, + number=54, + optional=True, + enum=LocalSsdEncryptionMode, + ) effective_cgroup_mode: EffectiveCgroupMode = proto.Field( proto.ENUM, number=55, @@ -3750,6 +3787,9 @@ class NodePoolAutoConfig(proto.Message): Currently only ``insecure_kubelet_readonly_port_enabled`` can be set here. + linux_node_config (google.cloud.container_v1.types.LinuxNodeConfig): + Output only. Configuration options for Linux + nodes. """ network_tags: "NetworkTags" = proto.Field( @@ -3767,6 +3807,11 @@ class NodePoolAutoConfig(proto.Message): number=3, message="NodeKubeletConfig", ) + linux_node_config: "LinuxNodeConfig" = proto.Field( + proto.MESSAGE, + number=4, + message="LinuxNodeConfig", + ) class NodePoolDefaults(proto.Message): @@ -4119,6 +4164,15 @@ class ClusterUpdate(proto.Message): created. This field is a member of `oneof`_ ``_desired_rbac_binding_config``. + desired_enterprise_config (google.cloud.container_v1.types.DesiredEnterpriseConfig): + The desired enterprise configuration for the + cluster. + desired_node_pool_auto_config_linux_node_config (google.cloud.container_v1.types.LinuxNodeConfig): + The desired Linux node config for all auto-provisioned node + pools in autopilot clusters and node auto-provisioning + enabled clusters. + + Currently only ``cgroup_mode`` can be set here. 
""" desired_node_version: str = proto.Field( @@ -4431,6 +4485,16 @@ class ClusterUpdate(proto.Message): optional=True, message="RBACBindingConfig", ) + desired_enterprise_config: "DesiredEnterpriseConfig" = proto.Field( + proto.MESSAGE, + number=147, + message="DesiredEnterpriseConfig", + ) + desired_node_pool_auto_config_linux_node_config: "LinuxNodeConfig" = proto.Field( + proto.MESSAGE, + number=150, + message="LinuxNodeConfig", + ) class AdditionalPodRangesConfig(proto.Message): @@ -4478,6 +4542,22 @@ class RangeInfo(proto.Message): ) +class DesiredEnterpriseConfig(proto.Message): + r"""DesiredEnterpriseConfig is a wrapper used for updating + enterprise_config. + + Attributes: + desired_tier (google.cloud.container_v1.types.EnterpriseConfig.ClusterTier): + desired_tier specifies the desired tier of the cluster. + """ + + desired_tier: "EnterpriseConfig.ClusterTier" = proto.Field( + proto.ENUM, + number=1, + enum="EnterpriseConfig.ClusterTier", + ) + + class Operation(proto.Message): r"""This operation resource represents operations that may have happened or are happening on the cluster. All fields are output @@ -7624,10 +7704,11 @@ class NodePoolAutoscaling(proto.Message): enabled (bool): Is autoscaling enabled for this node pool. min_node_count (int): - Minimum number of nodes for one location in the NodePool. - Must be >= 1 and <= max_node_count. + Minimum number of nodes for one location in the node pool. + Must be greater than or equal to 0 and less than or equal to + max_node_count. max_node_count (int): - Maximum number of nodes for one location in the NodePool. + Maximum number of nodes for one location in the node pool. Must be >= min_node_count. There has to be enough quota to scale up the cluster. autoprovisioned (bool): @@ -7637,14 +7718,14 @@ class NodePoolAutoscaling(proto.Message): nodepool. total_min_node_count (int): Minimum number of nodes in the node pool. Must be greater - than 1 less than total_max_node_count. 
The - total_*_node_count fields are mutually exclusive with the - \*_node_count fields. + than or equal to 0 and less than or equal to + total_max_node_count. The total_*_node_count fields are + mutually exclusive with the \*_node_count fields. total_max_node_count (int): Maximum number of nodes in the node pool. Must be greater - than total_min_node_count. There has to be enough quota to - scale up the cluster. The total_*_node_count fields are - mutually exclusive with the \*_node_count fields. + than or equal to total_min_node_count. There has to be + enough quota to scale up the cluster. The total_*_node_count + fields are mutually exclusive with the \*_node_count fields. """ class LocationPolicy(proto.Enum): @@ -9636,6 +9717,98 @@ class UpgradeEvent(proto.Message): ) +class UpgradeInfoEvent(proto.Message): + r"""UpgradeInfoEvent is a notification sent to customers about + the upgrade information of a resource. + + Attributes: + resource_type (google.cloud.container_v1.types.UpgradeResourceType): + The resource type associated with the + upgrade. + operation (str): + The operation associated with this upgrade. + start_time (google.protobuf.timestamp_pb2.Timestamp): + The time when the operation was started. + end_time (google.protobuf.timestamp_pb2.Timestamp): + The time when the operation ended. + current_version (str): + The current version before the upgrade. + target_version (str): + The target version for the upgrade. + resource (str): + Optional relative path to the resource. For + example in node pool upgrades, the relative path + of the node pool. + state (google.cloud.container_v1.types.UpgradeInfoEvent.State): + Output only. The state of the upgrade. + description (str): + A brief description of the event. + """ + + class State(proto.Enum): + r"""The state of the upgrade. + + Values: + STATE_UNSPECIFIED (0): + STATE_UNSPECIFIED indicates the state is unspecified. + STARTED (3): + STARTED indicates the upgrade has started. 
+ SUCCEEDED (4): + SUCCEEDED indicates the upgrade has completed + successfully. + FAILED (5): + FAILED indicates the upgrade has failed. + CANCELED (6): + CANCELED indicates the upgrade has canceled. + """ + STATE_UNSPECIFIED = 0 + STARTED = 3 + SUCCEEDED = 4 + FAILED = 5 + CANCELED = 6 + + resource_type: "UpgradeResourceType" = proto.Field( + proto.ENUM, + number=1, + enum="UpgradeResourceType", + ) + operation: str = proto.Field( + proto.STRING, + number=2, + ) + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + current_version: str = proto.Field( + proto.STRING, + number=5, + ) + target_version: str = proto.Field( + proto.STRING, + number=6, + ) + resource: str = proto.Field( + proto.STRING, + number=7, + ) + state: State = proto.Field( + proto.ENUM, + number=8, + enum=State, + ) + description: str = proto.Field( + proto.STRING, + number=11, + ) + + class UpgradeAvailableEvent(proto.Message): r"""UpgradeAvailableEvent is a notification sent to customers when a new available version is released. @@ -10375,6 +10548,8 @@ class EnterpriseConfig(proto.Message): cluster_tier (google.cloud.container_v1.types.EnterpriseConfig.ClusterTier): Output only. cluster_tier indicates the effective tier of the cluster. + desired_tier (google.cloud.container_v1.types.EnterpriseConfig.ClusterTier): + desired_tier specifies the desired tier of the cluster. 
""" class ClusterTier(proto.Enum): @@ -10398,6 +10573,11 @@ class ClusterTier(proto.Enum): number=1, enum=ClusterTier, ) + desired_tier: ClusterTier = proto.Field( + proto.ENUM, + number=2, + enum=ClusterTier, + ) class SecretManagerConfig(proto.Message): diff --git a/packages/google-cloud-container/google/cloud/container_v1beta1/gapic_version.py b/packages/google-cloud-container/google/cloud/container_v1beta1/gapic_version.py index 1fddec69a0c4..a754aa189117 100644 --- a/packages/google-cloud-container/google/cloud/container_v1beta1/gapic_version.py +++ b/packages/google-cloud-container/google/cloud/container_v1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.53.0" # {x-release-please-version} +__version__ = "2.54.0" # {x-release-please-version} diff --git a/packages/google-cloud-container/google/cloud/container_v1beta1/services/cluster_manager/client.py b/packages/google-cloud-container/google/cloud/container_v1beta1/services/cluster_manager/client.py index 13487da71d47..ce533e8e0a44 100644 --- a/packages/google-cloud-container/google/cloud/container_v1beta1/services/cluster_manager/client.py +++ b/packages/google-cloud-container/google/cloud/container_v1beta1/services/cluster_manager/client.py @@ -456,36 +456,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. 
- - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = ClusterManagerClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -495,13 +465,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or ClusterManagerClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. 
+ return True @property def api_endpoint(self): diff --git a/packages/google-cloud-container/samples/generated_samples/snippet_metadata_google.container.v1.json b/packages/google-cloud-container/samples/generated_samples/snippet_metadata_google.container.v1.json index bb340bf23f09..2c89b716fed1 100644 --- a/packages/google-cloud-container/samples/generated_samples/snippet_metadata_google.container.v1.json +++ b/packages/google-cloud-container/samples/generated_samples/snippet_metadata_google.container.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-container", - "version": "2.53.0" + "version": "2.54.0" }, "snippets": [ { diff --git a/packages/google-cloud-container/samples/generated_samples/snippet_metadata_google.container.v1beta1.json b/packages/google-cloud-container/samples/generated_samples/snippet_metadata_google.container.v1beta1.json index 98c22cf48897..9a1710f6c1c9 100644 --- a/packages/google-cloud-container/samples/generated_samples/snippet_metadata_google.container.v1beta1.json +++ b/packages/google-cloud-container/samples/generated_samples/snippet_metadata_google.container.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-container", - "version": "2.53.0" + "version": "2.54.0" }, "snippets": [ { diff --git a/packages/google-cloud-container/tests/unit/gapic/container_v1/test_cluster_manager.py b/packages/google-cloud-container/tests/unit/gapic/container_v1/test_cluster_manager.py index 6b59ca97c527..335cbc3a0795 100644 --- a/packages/google-cloud-container/tests/unit/gapic/container_v1/test_cluster_manager.py +++ b/packages/google-cloud-container/tests/unit/gapic/container_v1/test_cluster_manager.py @@ -323,86 +323,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
-@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (ClusterManagerClient, transports.ClusterManagerGrpcTransport, "grpc"), - (ClusterManagerClient, transports.ClusterManagerRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. 
- google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
- ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-container/tests/unit/gapic/container_v1beta1/test_cluster_manager.py b/packages/google-cloud-container/tests/unit/gapic/container_v1beta1/test_cluster_manager.py index caa61018517f..593f7711426f 100644 --- a/packages/google-cloud-container/tests/unit/gapic/container_v1beta1/test_cluster_manager.py +++ b/packages/google-cloud-container/tests/unit/gapic/container_v1beta1/test_cluster_manager.py @@ -318,85 +318,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (ClusterManagerClient, transports.ClusterManagerGrpcTransport, "grpc"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. 
- channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-containeranalysis/CHANGELOG.md b/packages/google-cloud-containeranalysis/CHANGELOG.md index aa1acf21652d..c1f8100e4bb8 100644 --- a/packages/google-cloud-containeranalysis/CHANGELOG.md +++ b/packages/google-cloud-containeranalysis/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [2.15.1](https://github.com/googleapis/google-cloud-python/compare/google-cloud-containeranalysis-v2.15.0...google-cloud-containeranalysis-v2.15.1) (2024-11-11) + + +### Bug Fixes + +* disable universe-domain validation ([#13242](https://github.com/googleapis/google-cloud-python/issues/13242)) ([b479ff8](https://github.com/googleapis/google-cloud-python/commit/b479ff841ed93a18393a188ee1d72edf9fb729ec)) + ## [2.15.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-containeranalysis-v2.14.5...google-cloud-containeranalysis-v2.15.0) (2024-10-24) diff --git 
a/packages/google-cloud-containeranalysis/google/cloud/devtools/containeranalysis/gapic_version.py b/packages/google-cloud-containeranalysis/google/cloud/devtools/containeranalysis/gapic_version.py index 60165d54bf8a..c7d2e6b06092 100644 --- a/packages/google-cloud-containeranalysis/google/cloud/devtools/containeranalysis/gapic_version.py +++ b/packages/google-cloud-containeranalysis/google/cloud/devtools/containeranalysis/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.15.0" # {x-release-please-version} +__version__ = "2.15.1" # {x-release-please-version} diff --git a/packages/google-cloud-containeranalysis/google/cloud/devtools/containeranalysis_v1/gapic_version.py b/packages/google-cloud-containeranalysis/google/cloud/devtools/containeranalysis_v1/gapic_version.py index 60165d54bf8a..c7d2e6b06092 100644 --- a/packages/google-cloud-containeranalysis/google/cloud/devtools/containeranalysis_v1/gapic_version.py +++ b/packages/google-cloud-containeranalysis/google/cloud/devtools/containeranalysis_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "2.15.0" # {x-release-please-version} +__version__ = "2.15.1" # {x-release-please-version} diff --git a/packages/google-cloud-containeranalysis/google/cloud/devtools/containeranalysis_v1/services/container_analysis/client.py b/packages/google-cloud-containeranalysis/google/cloud/devtools/containeranalysis_v1/services/container_analysis/client.py index 3b0dec1774db..05d27b82ffaf 100644 --- a/packages/google-cloud-containeranalysis/google/cloud/devtools/containeranalysis_v1/services/container_analysis/client.py +++ b/packages/google-cloud-containeranalysis/google/cloud/devtools/containeranalysis_v1/services/container_analysis/client.py @@ -458,36 +458,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = ContainerAnalysisClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. 
@@ -497,13 +467,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or ContainerAnalysisClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-containeranalysis/samples/generated_samples/snippet_metadata_google.devtools.containeranalysis.v1.json b/packages/google-cloud-containeranalysis/samples/generated_samples/snippet_metadata_google.devtools.containeranalysis.v1.json index 102e20913343..813d8373f741 100644 --- a/packages/google-cloud-containeranalysis/samples/generated_samples/snippet_metadata_google.devtools.containeranalysis.v1.json +++ b/packages/google-cloud-containeranalysis/samples/generated_samples/snippet_metadata_google.devtools.containeranalysis.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-containeranalysis", - "version": "2.15.0" + "version": "2.15.1" }, "snippets": [ { diff --git a/packages/google-cloud-containeranalysis/tests/unit/gapic/containeranalysis_v1/test_container_analysis.py b/packages/google-cloud-containeranalysis/tests/unit/gapic/containeranalysis_v1/test_container_analysis.py index 0463344ce81b..531c34d66051 100644 --- a/packages/google-cloud-containeranalysis/tests/unit/gapic/containeranalysis_v1/test_container_analysis.py +++ b/packages/google-cloud-containeranalysis/tests/unit/gapic/containeranalysis_v1/test_container_analysis.py @@ -331,86 +331,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
-@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (ContainerAnalysisClient, transports.ContainerAnalysisGrpcTransport, "grpc"), - (ContainerAnalysisClient, transports.ContainerAnalysisRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. 
- google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
- ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-contentwarehouse/CHANGELOG.md b/packages/google-cloud-contentwarehouse/CHANGELOG.md index 4ffa38187552..e5498ffd9b9f 100644 --- a/packages/google-cloud-contentwarehouse/CHANGELOG.md +++ b/packages/google-cloud-contentwarehouse/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [0.7.11](https://github.com/googleapis/google-cloud-python/compare/google-cloud-contentwarehouse-v0.7.10...google-cloud-contentwarehouse-v0.7.11) (2024-10-31) + + +### Bug Fixes + +* disable universe-domain validation ([53c951e](https://github.com/googleapis/google-cloud-python/commit/53c951e90ad1d702fa507495532086d5d2f6b3c0)) + ## [0.7.10](https://github.com/googleapis/google-cloud-python/compare/google-cloud-contentwarehouse-v0.7.9...google-cloud-contentwarehouse-v0.7.10) (2024-10-24) diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse/gapic_version.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse/gapic_version.py index a2381e52ccc8..2abd5a01e3d6 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse/gapic_version.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.7.10" # {x-release-please-version} +__version__ = "0.7.11" # {x-release-please-version} diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/gapic_version.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/gapic_version.py index a2381e52ccc8..2abd5a01e3d6 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/gapic_version.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.7.10" # {x-release-please-version} +__version__ = "0.7.11" # {x-release-please-version} diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_link_service/client.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_link_service/client.py index f471d7e4055a..afbf8681d390 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_link_service/client.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_link_service/client.py @@ -491,36 +491,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. 
- """ - - default_universe = DocumentLinkServiceClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -530,13 +500,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or DocumentLinkServiceClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_schema_service/client.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_schema_service/client.py index bf9bc88a165a..1eb0c4a265c9 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_schema_service/client.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_schema_service/client.py @@ -487,36 +487,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. 
- - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = DocumentSchemaServiceClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -526,13 +496,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or DocumentSchemaServiceClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. 
+ return True @property def api_endpoint(self): diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_service/client.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_service/client.py index 704d3e272d38..d72800798906 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_service/client.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_service/client.py @@ -511,36 +511,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = DocumentServiceClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -550,13 +520,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. 
""" - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or DocumentServiceClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/pipeline_service/client.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/pipeline_service/client.py index 613390fa4b26..b1df7f0b7659 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/pipeline_service/client.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/pipeline_service/client.py @@ -481,36 +481,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = PipelineServiceClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). 
" - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -520,13 +490,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or PipelineServiceClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/rule_set_service/client.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/rule_set_service/client.py index 7588c418229a..34dfb64600e6 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/rule_set_service/client.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/rule_set_service/client.py @@ -502,36 +502,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. 
- """ - - default_universe = RuleSetServiceClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -541,13 +511,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or RuleSetServiceClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/synonym_set_service/client.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/synonym_set_service/client.py index db6940d5f1a9..59ac12cd7dd4 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/synonym_set_service/client.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/synonym_set_service/client.py @@ -483,36 +483,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. 
- - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = SynonymSetServiceClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -522,13 +492,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or SynonymSetServiceClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. 
+ return True @property def api_endpoint(self): diff --git a/packages/google-cloud-contentwarehouse/samples/generated_samples/snippet_metadata_google.cloud.contentwarehouse.v1.json b/packages/google-cloud-contentwarehouse/samples/generated_samples/snippet_metadata_google.cloud.contentwarehouse.v1.json index d05c1097114e..1edb6ed03680 100644 --- a/packages/google-cloud-contentwarehouse/samples/generated_samples/snippet_metadata_google.cloud.contentwarehouse.v1.json +++ b/packages/google-cloud-contentwarehouse/samples/generated_samples/snippet_metadata_google.cloud.contentwarehouse.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-contentwarehouse", - "version": "0.7.10" + "version": "0.7.11" }, "snippets": [ { diff --git a/packages/google-cloud-contentwarehouse/setup.py b/packages/google-cloud-contentwarehouse/setup.py index b33a14f0957b..a04ce63bb651 100644 --- a/packages/google-cloud-contentwarehouse/setup.py +++ b/packages/google-cloud-contentwarehouse/setup.py @@ -48,7 +48,7 @@ "proto-plus >= 1.22.3, <2.0.0dev", "proto-plus >= 1.25.0, <2.0.0dev; python_version >= '3.13'", "protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", - "google-cloud-documentai >= 2.0.0, <3.0.0dev", + "google-cloud-documentai >= 2.0.0, <4.0.0dev", "grpc-google-iam-v1 >= 0.12.4, <1.0.0dev", ] extras = {} diff --git a/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_document_link_service.py b/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_document_link_service.py index 8719c6a72dfa..a9065d3872b0 100644 --- a/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_document_link_service.py +++ b/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_document_link_service.py @@ -339,94 +339,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
-@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - ( - DocumentLinkServiceClient, - transports.DocumentLinkServiceGrpcTransport, - "grpc", - ), - ( - DocumentLinkServiceClient, - transports.DocumentLinkServiceRestTransport, - "rest", - ), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. 
- google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
- ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_document_schema_service.py b/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_document_schema_service.py index 08b77780da53..cbc4a7bc860c 100644 --- a/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_document_schema_service.py +++ b/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_document_schema_service.py @@ -341,94 +341,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - ( - DocumentSchemaServiceClient, - transports.DocumentSchemaServiceGrpcTransport, - "grpc", - ), - ( - DocumentSchemaServiceClient, - transports.DocumentSchemaServiceRestTransport, - "rest", - ), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. 
a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_document_service.py b/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_document_service.py index 3979662a0f5d..fad5ef9c602e 100644 --- a/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_document_service.py +++ b/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_document_service.py @@ -344,86 +344,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
-@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (DocumentServiceClient, transports.DocumentServiceGrpcTransport, "grpc"), - (DocumentServiceClient, transports.DocumentServiceRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. 
- google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
- ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_pipeline_service.py b/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_pipeline_service.py index 94ad45d2a36c..68584d2b2dd6 100644 --- a/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_pipeline_service.py +++ b/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_pipeline_service.py @@ -329,86 +329,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (PipelineServiceClient, transports.PipelineServiceGrpcTransport, "grpc"), - (PipelineServiceClient, transports.PipelineServiceRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. 
Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_rule_set_service.py b/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_rule_set_service.py index 9c8e942614e0..cf7df9f6e207 100644 --- a/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_rule_set_service.py +++ b/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_rule_set_service.py @@ -321,86 +321,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
-@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (RuleSetServiceClient, transports.RuleSetServiceGrpcTransport, "grpc"), - (RuleSetServiceClient, transports.RuleSetServiceRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. 
- google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
- ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_synonym_set_service.py b/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_synonym_set_service.py index f2017d3f10c6..09d8d00a5e40 100644 --- a/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_synonym_set_service.py +++ b/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_synonym_set_service.py @@ -331,86 +331,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (SynonymSetServiceClient, transports.SynonymSetServiceGrpcTransport, "grpc"), - (SynonymSetServiceClient, transports.SynonymSetServiceRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. 
Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-data-fusion/CHANGELOG.md b/packages/google-cloud-data-fusion/CHANGELOG.md index 6ca5b517e93a..da9e1261ed76 100644 --- a/packages/google-cloud-data-fusion/CHANGELOG.md +++ b/packages/google-cloud-data-fusion/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [1.11.1](https://github.com/googleapis/google-cloud-python/compare/google-cloud-data-fusion-v1.11.0...google-cloud-data-fusion-v1.11.1) (2024-10-31) + + +### Bug Fixes + +* disable universe-domain validation ([53c951e](https://github.com/googleapis/google-cloud-python/commit/53c951e90ad1d702fa507495532086d5d2f6b3c0)) + ## [1.11.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-data-fusion-v1.10.5...google-cloud-data-fusion-v1.11.0) (2024-10-24) diff --git a/packages/google-cloud-data-fusion/google/cloud/data_fusion/gapic_version.py b/packages/google-cloud-data-fusion/google/cloud/data_fusion/gapic_version.py index 50d842f376d0..b50cada0b7ee 100644 --- 
a/packages/google-cloud-data-fusion/google/cloud/data_fusion/gapic_version.py +++ b/packages/google-cloud-data-fusion/google/cloud/data_fusion/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.11.0" # {x-release-please-version} +__version__ = "1.11.1" # {x-release-please-version} diff --git a/packages/google-cloud-data-fusion/google/cloud/data_fusion_v1/gapic_version.py b/packages/google-cloud-data-fusion/google/cloud/data_fusion_v1/gapic_version.py index 50d842f376d0..b50cada0b7ee 100644 --- a/packages/google-cloud-data-fusion/google/cloud/data_fusion_v1/gapic_version.py +++ b/packages/google-cloud-data-fusion/google/cloud/data_fusion_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.11.0" # {x-release-please-version} +__version__ = "1.11.1" # {x-release-please-version} diff --git a/packages/google-cloud-data-fusion/google/cloud/data_fusion_v1/services/data_fusion/client.py b/packages/google-cloud-data-fusion/google/cloud/data_fusion_v1/services/data_fusion/client.py index 1fb70174685f..cf39d118fcfa 100644 --- a/packages/google-cloud-data-fusion/google/cloud/data_fusion_v1/services/data_fusion/client.py +++ b/packages/google-cloud-data-fusion/google/cloud/data_fusion_v1/services/data_fusion/client.py @@ -492,36 +492,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. 
- - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = DataFusionClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -531,13 +501,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or DataFusionClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. 
+ return True @property def api_endpoint(self): diff --git a/packages/google-cloud-data-fusion/samples/generated_samples/snippet_metadata_google.cloud.datafusion.v1.json b/packages/google-cloud-data-fusion/samples/generated_samples/snippet_metadata_google.cloud.datafusion.v1.json index b32a43bdfc4a..b41568b9bbc7 100644 --- a/packages/google-cloud-data-fusion/samples/generated_samples/snippet_metadata_google.cloud.datafusion.v1.json +++ b/packages/google-cloud-data-fusion/samples/generated_samples/snippet_metadata_google.cloud.datafusion.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-data-fusion", - "version": "1.11.0" + "version": "1.11.1" }, "snippets": [ { diff --git a/packages/google-cloud-data-fusion/tests/unit/gapic/data_fusion_v1/test_data_fusion.py b/packages/google-cloud-data-fusion/tests/unit/gapic/data_fusion_v1/test_data_fusion.py index 2189f4bff846..e52ab4ef600f 100644 --- a/packages/google-cloud-data-fusion/tests/unit/gapic/data_fusion_v1/test_data_fusion.py +++ b/packages/google-cloud-data-fusion/tests/unit/gapic/data_fusion_v1/test_data_fusion.py @@ -308,86 +308,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (DataFusionClient, transports.DataFusionGrpcTransport, "grpc"), - (DataFusionClient, transports.DataFusionRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. 
- channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-data-qna/CHANGELOG.md b/packages/google-cloud-data-qna/CHANGELOG.md index 56160c2d9e32..d92e099d6836 100644 --- a/packages/google-cloud-data-qna/CHANGELOG.md +++ b/packages/google-cloud-data-qna/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [0.10.13](https://github.com/googleapis/google-cloud-python/compare/google-cloud-data-qna-v0.10.12...google-cloud-data-qna-v0.10.13) (2024-10-31) + + +### Bug Fixes + +* disable universe-domain validation ([53c951e](https://github.com/googleapis/google-cloud-python/commit/53c951e90ad1d702fa507495532086d5d2f6b3c0)) + ## [0.10.12](https://github.com/googleapis/google-cloud-python/compare/google-cloud-data-qna-v0.10.11...google-cloud-data-qna-v0.10.12) (2024-10-24) diff --git a/packages/google-cloud-data-qna/google/cloud/dataqna/gapic_version.py b/packages/google-cloud-data-qna/google/cloud/dataqna/gapic_version.py index dbb72468cf59..3de369591024 100644 --- 
a/packages/google-cloud-data-qna/google/cloud/dataqna/gapic_version.py +++ b/packages/google-cloud-data-qna/google/cloud/dataqna/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.10.12" # {x-release-please-version} +__version__ = "0.10.13" # {x-release-please-version} diff --git a/packages/google-cloud-data-qna/google/cloud/dataqna_v1alpha/gapic_version.py b/packages/google-cloud-data-qna/google/cloud/dataqna_v1alpha/gapic_version.py index dbb72468cf59..3de369591024 100644 --- a/packages/google-cloud-data-qna/google/cloud/dataqna_v1alpha/gapic_version.py +++ b/packages/google-cloud-data-qna/google/cloud/dataqna_v1alpha/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.10.12" # {x-release-please-version} +__version__ = "0.10.13" # {x-release-please-version} diff --git a/packages/google-cloud-data-qna/google/cloud/dataqna_v1alpha/services/auto_suggestion_service/client.py b/packages/google-cloud-data-qna/google/cloud/dataqna_v1alpha/services/auto_suggestion_service/client.py index 6de5b4c512ba..f0830404cdd8 100644 --- a/packages/google-cloud-data-qna/google/cloud/dataqna_v1alpha/services/auto_suggestion_service/client.py +++ b/packages/google-cloud-data-qna/google/cloud/dataqna_v1alpha/services/auto_suggestion_service/client.py @@ -511,36 +511,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. 
- - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = AutoSuggestionServiceClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -550,13 +520,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or AutoSuggestionServiceClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. 
+ return True @property def api_endpoint(self): diff --git a/packages/google-cloud-data-qna/google/cloud/dataqna_v1alpha/services/question_service/client.py b/packages/google-cloud-data-qna/google/cloud/dataqna_v1alpha/services/question_service/client.py index a1793ae50ed9..9133706d7721 100644 --- a/packages/google-cloud-data-qna/google/cloud/dataqna_v1alpha/services/question_service/client.py +++ b/packages/google-cloud-data-qna/google/cloud/dataqna_v1alpha/services/question_service/client.py @@ -507,36 +507,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = QuestionServiceClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -546,13 +516,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. 
""" - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or QuestionServiceClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-data-qna/samples/generated_samples/snippet_metadata_google.cloud.dataqna.v1alpha.json b/packages/google-cloud-data-qna/samples/generated_samples/snippet_metadata_google.cloud.dataqna.v1alpha.json index c7642dc3d0a2..ddd45f0cdee4 100644 --- a/packages/google-cloud-data-qna/samples/generated_samples/snippet_metadata_google.cloud.dataqna.v1alpha.json +++ b/packages/google-cloud-data-qna/samples/generated_samples/snippet_metadata_google.cloud.dataqna.v1alpha.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-data-qna", - "version": "0.10.12" + "version": "0.10.13" }, "snippets": [ { diff --git a/packages/google-cloud-data-qna/tests/unit/gapic/dataqna_v1alpha/test_auto_suggestion_service.py b/packages/google-cloud-data-qna/tests/unit/gapic/dataqna_v1alpha/test_auto_suggestion_service.py index a14557af276e..8601ec13a534 100644 --- a/packages/google-cloud-data-qna/tests/unit/gapic/dataqna_v1alpha/test_auto_suggestion_service.py +++ b/packages/google-cloud-data-qna/tests/unit/gapic/dataqna_v1alpha/test_auto_suggestion_service.py @@ -334,94 +334,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
-@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - ( - AutoSuggestionServiceClient, - transports.AutoSuggestionServiceGrpcTransport, - "grpc", - ), - ( - AutoSuggestionServiceClient, - transports.AutoSuggestionServiceRestTransport, - "rest", - ), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. 
- google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
- ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-data-qna/tests/unit/gapic/dataqna_v1alpha/test_question_service.py b/packages/google-cloud-data-qna/tests/unit/gapic/dataqna_v1alpha/test_question_service.py index 73c312970063..0bab8ee42283 100644 --- a/packages/google-cloud-data-qna/tests/unit/gapic/dataqna_v1alpha/test_question_service.py +++ b/packages/google-cloud-data-qna/tests/unit/gapic/dataqna_v1alpha/test_question_service.py @@ -326,86 +326,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (QuestionServiceClient, transports.QuestionServiceGrpcTransport, "grpc"), - (QuestionServiceClient, transports.QuestionServiceRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. 
- channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-datacatalog-lineage/CHANGELOG.md b/packages/google-cloud-datacatalog-lineage/CHANGELOG.md index ce5941de19ee..6e6240d17b4e 100644 --- a/packages/google-cloud-datacatalog-lineage/CHANGELOG.md +++ b/packages/google-cloud-datacatalog-lineage/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [0.3.10](https://github.com/googleapis/google-cloud-python/compare/google-cloud-datacatalog-lineage-v0.3.9...google-cloud-datacatalog-lineage-v0.3.10) (2024-10-31) + + +### Bug Fixes + +* disable universe-domain validation ([53c951e](https://github.com/googleapis/google-cloud-python/commit/53c951e90ad1d702fa507495532086d5d2f6b3c0)) + ## [0.3.9](https://github.com/googleapis/google-cloud-python/compare/google-cloud-datacatalog-lineage-v0.3.8...google-cloud-datacatalog-lineage-v0.3.9) (2024-10-24) diff --git a/packages/google-cloud-datacatalog-lineage/google/cloud/datacatalog_lineage/gapic_version.py 
b/packages/google-cloud-datacatalog-lineage/google/cloud/datacatalog_lineage/gapic_version.py index d24d3fba9057..3425e3287cda 100644 --- a/packages/google-cloud-datacatalog-lineage/google/cloud/datacatalog_lineage/gapic_version.py +++ b/packages/google-cloud-datacatalog-lineage/google/cloud/datacatalog_lineage/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.3.9" # {x-release-please-version} +__version__ = "0.3.10" # {x-release-please-version} diff --git a/packages/google-cloud-datacatalog-lineage/google/cloud/datacatalog_lineage_v1/gapic_version.py b/packages/google-cloud-datacatalog-lineage/google/cloud/datacatalog_lineage_v1/gapic_version.py index d24d3fba9057..3425e3287cda 100644 --- a/packages/google-cloud-datacatalog-lineage/google/cloud/datacatalog_lineage_v1/gapic_version.py +++ b/packages/google-cloud-datacatalog-lineage/google/cloud/datacatalog_lineage_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.3.9" # {x-release-please-version} +__version__ = "0.3.10" # {x-release-please-version} diff --git a/packages/google-cloud-datacatalog-lineage/google/cloud/datacatalog_lineage_v1/services/lineage/client.py b/packages/google-cloud-datacatalog-lineage/google/cloud/datacatalog_lineage_v1/services/lineage/client.py index 74b00d699b5b..703d05764ee2 100644 --- a/packages/google-cloud-datacatalog-lineage/google/cloud/datacatalog_lineage_v1/services/lineage/client.py +++ b/packages/google-cloud-datacatalog-lineage/google/cloud/datacatalog_lineage_v1/services/lineage/client.py @@ -522,36 +522,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = LineageClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. 
@@ -561,13 +531,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or LineageClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-datacatalog-lineage/samples/generated_samples/snippet_metadata_google.cloud.datacatalog.lineage.v1.json b/packages/google-cloud-datacatalog-lineage/samples/generated_samples/snippet_metadata_google.cloud.datacatalog.lineage.v1.json index b3c5117586d8..3abf1395c457 100644 --- a/packages/google-cloud-datacatalog-lineage/samples/generated_samples/snippet_metadata_google.cloud.datacatalog.lineage.v1.json +++ b/packages/google-cloud-datacatalog-lineage/samples/generated_samples/snippet_metadata_google.cloud.datacatalog.lineage.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-datacatalog-lineage", - "version": "0.3.9" + "version": "0.3.10" }, "snippets": [ { diff --git a/packages/google-cloud-datacatalog-lineage/tests/unit/gapic/datacatalog_lineage_v1/test_lineage.py b/packages/google-cloud-datacatalog-lineage/tests/unit/gapic/datacatalog_lineage_v1/test_lineage.py index 6d5913e203ae..5efbc8276517 100644 --- a/packages/google-cloud-datacatalog-lineage/tests/unit/gapic/datacatalog_lineage_v1/test_lineage.py +++ b/packages/google-cloud-datacatalog-lineage/tests/unit/gapic/datacatalog_lineage_v1/test_lineage.py @@ -297,86 +297,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
-@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (LineageClient, transports.LineageGrpcTransport, "grpc"), - (LineageClient, transports.LineageRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. 
- client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
- ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-datacatalog/CHANGELOG.md b/packages/google-cloud-datacatalog/CHANGELOG.md index e0f697d191f1..77d144fa2da2 100644 --- a/packages/google-cloud-datacatalog/CHANGELOG.md +++ b/packages/google-cloud-datacatalog/CHANGELOG.md @@ -4,6 +4,13 @@ [1]: https://pypi.org/project/google-cloud-datacatalog/#history +## [3.21.1](https://github.com/googleapis/google-cloud-python/compare/google-cloud-datacatalog-v3.21.0...google-cloud-datacatalog-v3.21.1) (2024-10-31) + + +### Bug Fixes + +* disable universe-domain validation ([53c951e](https://github.com/googleapis/google-cloud-python/commit/53c951e90ad1d702fa507495532086d5d2f6b3c0)) + ## [3.21.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-datacatalog-v3.20.1...google-cloud-datacatalog-v3.21.0) (2024-10-24) diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog/gapic_version.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog/gapic_version.py index c843b7364c5d..cb24f2360f5a 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog/gapic_version.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "3.21.0" # {x-release-please-version} +__version__ = "3.21.1" # {x-release-please-version} diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/gapic_version.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/gapic_version.py index c843b7364c5d..cb24f2360f5a 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/gapic_version.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.21.0" # {x-release-please-version} +__version__ = "3.21.1" # {x-release-please-version} diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/data_catalog/client.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/data_catalog/client.py index 3e92d17deb06..ab94e4e24b8c 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/data_catalog/client.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/data_catalog/client.py @@ -601,36 +601,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. 
- """ - - default_universe = DataCatalogClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -640,13 +610,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or DataCatalogClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/policy_tag_manager/client.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/policy_tag_manager/client.py index 80991e5efa03..e0b8a49d9e11 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/policy_tag_manager/client.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/policy_tag_manager/client.py @@ -493,36 +493,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. 
- credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = PolicyTagManagerClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -532,13 +502,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or PolicyTagManagerClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. 
+ return True @property def api_endpoint(self): diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/client.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/client.py index fc21f2be6f2c..6c90b1f76b28 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/client.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/client.py @@ -477,36 +477,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = PolicyTagManagerSerializationClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. 
@@ -516,13 +486,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or PolicyTagManagerSerializationClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/gapic_version.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/gapic_version.py index c843b7364c5d..cb24f2360f5a 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/gapic_version.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.21.0" # {x-release-please-version} +__version__ = "3.21.1" # {x-release-please-version} diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/data_catalog/client.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/data_catalog/client.py index 1760cad85812..5610b7a2f99f 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/data_catalog/client.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/data_catalog/client.py @@ -598,36 +598,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. 
- - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = DataCatalogClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -637,13 +607,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or DataCatalogClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. 
+ return True @property def api_endpoint(self): diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/client.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/client.py index 420376ff3822..aef9c262cac6 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/client.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/client.py @@ -489,36 +489,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = PolicyTagManagerClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -528,13 +498,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. 
""" - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or PolicyTagManagerClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/client.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/client.py index 179bf24456d1..25ac84fdc50f 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/client.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/client.py @@ -474,36 +474,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = PolicyTagManagerSerializationClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). 
" - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -513,13 +483,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or PolicyTagManagerSerializationClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-datacatalog/samples/generated_samples/snippet_metadata_google.cloud.datacatalog.v1.json b/packages/google-cloud-datacatalog/samples/generated_samples/snippet_metadata_google.cloud.datacatalog.v1.json index d5d949a57dd6..d2f74c49388e 100644 --- a/packages/google-cloud-datacatalog/samples/generated_samples/snippet_metadata_google.cloud.datacatalog.v1.json +++ b/packages/google-cloud-datacatalog/samples/generated_samples/snippet_metadata_google.cloud.datacatalog.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-datacatalog", - "version": "3.21.0" + "version": "3.21.1" }, "snippets": [ { diff --git a/packages/google-cloud-datacatalog/samples/generated_samples/snippet_metadata_google.cloud.datacatalog.v1beta1.json b/packages/google-cloud-datacatalog/samples/generated_samples/snippet_metadata_google.cloud.datacatalog.v1beta1.json index 19721102705f..16437f0e222d 100644 --- a/packages/google-cloud-datacatalog/samples/generated_samples/snippet_metadata_google.cloud.datacatalog.v1beta1.json +++ b/packages/google-cloud-datacatalog/samples/generated_samples/snippet_metadata_google.cloud.datacatalog.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-datacatalog", - "version": "3.21.0" + 
"version": "3.21.1" }, "snippets": [ { diff --git a/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1/test_data_catalog.py b/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1/test_data_catalog.py index e90c5f1e70d0..a330df809776 100644 --- a/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1/test_data_catalog.py +++ b/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1/test_data_catalog.py @@ -324,85 +324,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (DataCatalogClient, transports.DataCatalogGrpcTransport, "grpc"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. 
- channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1/test_policy_tag_manager.py b/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1/test_policy_tag_manager.py index f496e975a219..972254a07769 100644 --- a/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1/test_policy_tag_manager.py +++ b/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1/test_policy_tag_manager.py @@ -321,85 +321,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (PolicyTagManagerClient, transports.PolicyTagManagerGrpcTransport, "grpc"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. 
- assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. 
- # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1/test_policy_tag_manager_serialization.py b/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1/test_policy_tag_manager_serialization.py index f964eab46dcd..c3975dc8fb0f 100644 --- a/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1/test_policy_tag_manager_serialization.py +++ b/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1/test_policy_tag_manager_serialization.py @@ -349,89 +349,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
-@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - ( - PolicyTagManagerSerializationClient, - transports.PolicyTagManagerSerializationGrpcTransport, - "grpc", - ), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. 
- client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
- ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1beta1/test_data_catalog.py b/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1beta1/test_data_catalog.py index 5e2d2b936c65..bfab0c177432 100644 --- a/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1beta1/test_data_catalog.py +++ b/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1beta1/test_data_catalog.py @@ -311,85 +311,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (DataCatalogClient, transports.DataCatalogGrpcTransport, "grpc"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. 
- channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1beta1/test_policy_tag_manager.py b/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1beta1/test_policy_tag_manager.py index 57e1f17d36f0..810cb501ae32 100644 --- a/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1beta1/test_policy_tag_manager.py +++ b/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1beta1/test_policy_tag_manager.py @@ -321,85 +321,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (PolicyTagManagerClient, transports.PolicyTagManagerGrpcTransport, "grpc"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. 
- assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. 
- # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1beta1/test_policy_tag_manager_serialization.py b/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1beta1/test_policy_tag_manager_serialization.py index 903f1ec35238..ec57ea454df9 100644 --- a/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1beta1/test_policy_tag_manager_serialization.py +++ b/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1beta1/test_policy_tag_manager_serialization.py @@ -348,89 +348,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
-@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - ( - PolicyTagManagerSerializationClient, - transports.PolicyTagManagerSerializationGrpcTransport, - "grpc", - ), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. 
- client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
- ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-dataflow-client/CHANGELOG.md b/packages/google-cloud-dataflow-client/CHANGELOG.md index 1fbe8ec0d58a..05163fe670f2 100644 --- a/packages/google-cloud-dataflow-client/CHANGELOG.md +++ b/packages/google-cloud-dataflow-client/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [0.8.14](https://github.com/googleapis/google-cloud-python/compare/google-cloud-dataflow-client-v0.8.13...google-cloud-dataflow-client-v0.8.14) (2024-11-11) + + +### Bug Fixes + +* disable universe-domain validation ([#13242](https://github.com/googleapis/google-cloud-python/issues/13242)) ([b479ff8](https://github.com/googleapis/google-cloud-python/commit/b479ff841ed93a18393a188ee1d72edf9fb729ec)) + ## [0.8.13](https://github.com/googleapis/google-cloud-python/compare/google-cloud-dataflow-client-v0.8.12...google-cloud-dataflow-client-v0.8.13) (2024-10-24) diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow/gapic_version.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow/gapic_version.py index 7e84c98cf883..e210acc147fa 100644 --- a/packages/google-cloud-dataflow-client/google/cloud/dataflow/gapic_version.py +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.8.13" # {x-release-please-version} +__version__ = "0.8.14" # {x-release-please-version} diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/gapic_version.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/gapic_version.py index 7e84c98cf883..e210acc147fa 100644 --- a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/gapic_version.py +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.8.13" # {x-release-please-version} +__version__ = "0.8.14" # {x-release-please-version} diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/flex_templates_service/client.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/flex_templates_service/client.py index 71e049ca39e5..efe8d4659faf 100644 --- a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/flex_templates_service/client.py +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/flex_templates_service/client.py @@ -440,36 +440,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. 
- """ - - default_universe = FlexTemplatesServiceClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -479,13 +449,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or FlexTemplatesServiceClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/client.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/client.py index 1f3019cd8a89..c1c135e57f95 100644 --- a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/client.py +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/client.py @@ -443,36 +443,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. 
- credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = JobsV1Beta3Client._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -482,13 +452,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or JobsV1Beta3Client._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. 
+ return True @property def api_endpoint(self): diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/client.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/client.py index c32452797c5a..d0fa910acb3c 100644 --- a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/client.py +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/client.py @@ -441,36 +441,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = MessagesV1Beta3Client._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -480,13 +450,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. 
""" - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or MessagesV1Beta3Client._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/client.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/client.py index 9c6570dea285..028e20a2a4b7 100644 --- a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/client.py +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/client.py @@ -443,36 +443,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = MetricsV1Beta3Client._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." 
- ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -482,13 +452,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or MetricsV1Beta3Client._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/client.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/client.py index 568607068769..c67cd9e45839 100644 --- a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/client.py +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/client.py @@ -443,36 +443,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. 
- """ - - default_universe = SnapshotsV1Beta3Client._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -482,13 +452,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or SnapshotsV1Beta3Client._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/templates_service/client.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/templates_service/client.py index 53466a958396..a2aa8bc13ddd 100644 --- a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/templates_service/client.py +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/templates_service/client.py @@ -443,36 +443,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. 
- credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = TemplatesServiceClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -482,13 +452,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or TemplatesServiceClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. 
+ return True @property def api_endpoint(self): diff --git a/packages/google-cloud-dataflow-client/samples/generated_samples/snippet_metadata_google.dataflow.v1beta3.json b/packages/google-cloud-dataflow-client/samples/generated_samples/snippet_metadata_google.dataflow.v1beta3.json index 4ac5f8638ca0..8ef258ff542e 100644 --- a/packages/google-cloud-dataflow-client/samples/generated_samples/snippet_metadata_google.dataflow.v1beta3.json +++ b/packages/google-cloud-dataflow-client/samples/generated_samples/snippet_metadata_google.dataflow.v1beta3.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-dataflow-client", - "version": "0.8.13" + "version": "0.8.14" }, "snippets": [ { diff --git a/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_flex_templates_service.py b/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_flex_templates_service.py index 9f17c1c01342..e2a90857777a 100644 --- a/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_flex_templates_service.py +++ b/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_flex_templates_service.py @@ -332,94 +332,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - ( - FlexTemplatesServiceClient, - transports.FlexTemplatesServiceGrpcTransport, - "grpc", - ), - ( - FlexTemplatesServiceClient, - transports.FlexTemplatesServiceRestTransport, - "rest", - ), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. 
- assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. 
- # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_jobs_v1_beta3.py b/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_jobs_v1_beta3.py index 9685bbc2d89d..f793b62dcd57 100644 --- a/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_jobs_v1_beta3.py +++ b/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_jobs_v1_beta3.py @@ -303,86 +303,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
-@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (JobsV1Beta3Client, transports.JobsV1Beta3GrpcTransport, "grpc"), - (JobsV1Beta3Client, transports.JobsV1Beta3RestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. 
- google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
- ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_messages_v1_beta3.py b/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_messages_v1_beta3.py index 90347c6fe0ab..d6ef650a385d 100644 --- a/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_messages_v1_beta3.py +++ b/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_messages_v1_beta3.py @@ -319,86 +319,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (MessagesV1Beta3Client, transports.MessagesV1Beta3GrpcTransport, "grpc"), - (MessagesV1Beta3Client, transports.MessagesV1Beta3RestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. 
Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_metrics_v1_beta3.py b/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_metrics_v1_beta3.py index 59fd01c605fb..5b739b491ec3 100644 --- a/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_metrics_v1_beta3.py +++ b/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_metrics_v1_beta3.py @@ -319,86 +319,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
-@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (MetricsV1Beta3Client, transports.MetricsV1Beta3GrpcTransport, "grpc"), - (MetricsV1Beta3Client, transports.MetricsV1Beta3RestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. 
- google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
- ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_snapshots_v1_beta3.py b/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_snapshots_v1_beta3.py index d030c0faabf5..2ba1f6ad5e76 100644 --- a/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_snapshots_v1_beta3.py +++ b/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_snapshots_v1_beta3.py @@ -320,86 +320,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (SnapshotsV1Beta3Client, transports.SnapshotsV1Beta3GrpcTransport, "grpc"), - (SnapshotsV1Beta3Client, transports.SnapshotsV1Beta3RestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. 
Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_templates_service.py b/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_templates_service.py index c92481b04820..c11d688433e5 100644 --- a/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_templates_service.py +++ b/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_templates_service.py @@ -320,86 +320,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
-@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (TemplatesServiceClient, transports.TemplatesServiceGrpcTransport, "grpc"), - (TemplatesServiceClient, transports.TemplatesServiceRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. 
- google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
- ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-dataform/CHANGELOG.md b/packages/google-cloud-dataform/CHANGELOG.md index a842ff3a5021..dde302be4912 100644 --- a/packages/google-cloud-dataform/CHANGELOG.md +++ b/packages/google-cloud-dataform/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [0.5.13](https://github.com/googleapis/google-cloud-python/compare/google-cloud-dataform-v0.5.12...google-cloud-dataform-v0.5.13) (2024-10-31) + + +### Bug Fixes + +* disable universe-domain validation ([53c951e](https://github.com/googleapis/google-cloud-python/commit/53c951e90ad1d702fa507495532086d5d2f6b3c0)) + ## [0.5.12](https://github.com/googleapis/google-cloud-python/compare/google-cloud-dataform-v0.5.11...google-cloud-dataform-v0.5.12) (2024-10-24) diff --git a/packages/google-cloud-dataform/google/cloud/dataform/gapic_version.py b/packages/google-cloud-dataform/google/cloud/dataform/gapic_version.py index cc43a639a105..bf678492aaad 100644 --- a/packages/google-cloud-dataform/google/cloud/dataform/gapic_version.py +++ b/packages/google-cloud-dataform/google/cloud/dataform/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.5.12" # {x-release-please-version} +__version__ = "0.5.13" # {x-release-please-version} diff --git a/packages/google-cloud-dataform/google/cloud/dataform_v1beta1/gapic_version.py b/packages/google-cloud-dataform/google/cloud/dataform_v1beta1/gapic_version.py index cc43a639a105..bf678492aaad 100644 --- a/packages/google-cloud-dataform/google/cloud/dataform_v1beta1/gapic_version.py +++ b/packages/google-cloud-dataform/google/cloud/dataform_v1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.5.12" # {x-release-please-version} +__version__ = "0.5.13" # {x-release-please-version} diff --git a/packages/google-cloud-dataform/google/cloud/dataform_v1beta1/services/dataform/client.py b/packages/google-cloud-dataform/google/cloud/dataform_v1beta1/services/dataform/client.py index 1642af58d116..33da795ded13 100644 --- a/packages/google-cloud-dataform/google/cloud/dataform_v1beta1/services/dataform/client.py +++ b/packages/google-cloud-dataform/google/cloud/dataform_v1beta1/services/dataform/client.py @@ -611,36 +611,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. 
- """ - - default_universe = DataformClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -650,13 +620,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or DataformClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. 
+ return True @property def api_endpoint(self): diff --git a/packages/google-cloud-dataform/samples/generated_samples/snippet_metadata_google.cloud.dataform.v1beta1.json b/packages/google-cloud-dataform/samples/generated_samples/snippet_metadata_google.cloud.dataform.v1beta1.json index b2b1fa7538fc..096b7975227d 100644 --- a/packages/google-cloud-dataform/samples/generated_samples/snippet_metadata_google.cloud.dataform.v1beta1.json +++ b/packages/google-cloud-dataform/samples/generated_samples/snippet_metadata_google.cloud.dataform.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-dataform", - "version": "0.5.12" + "version": "0.5.13" }, "snippets": [ { diff --git a/packages/google-cloud-dataform/tests/unit/gapic/dataform_v1beta1/test_dataform.py b/packages/google-cloud-dataform/tests/unit/gapic/dataform_v1beta1/test_dataform.py index e57acb0c2c4c..6bf0b6f1180b 100644 --- a/packages/google-cloud-dataform/tests/unit/gapic/dataform_v1beta1/test_dataform.py +++ b/packages/google-cloud-dataform/tests/unit/gapic/dataform_v1beta1/test_dataform.py @@ -299,86 +299,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (DataformClient, transports.DataformGrpcTransport, "grpc"), - (DataformClient, transports.DataformRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. 
- channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-datalabeling/CHANGELOG.md b/packages/google-cloud-datalabeling/CHANGELOG.md index f5419aaf8955..d75319c9aee4 100644 --- a/packages/google-cloud-datalabeling/CHANGELOG.md +++ b/packages/google-cloud-datalabeling/CHANGELOG.md @@ -4,6 +4,13 @@ [1]: https://pypi.org/project/google-cloud-datalabeling/#history +## [1.11.1](https://github.com/googleapis/google-cloud-python/compare/google-cloud-datalabeling-v1.11.0...google-cloud-datalabeling-v1.11.1) (2024-10-31) + + +### Bug Fixes + +* disable universe-domain validation ([53c951e](https://github.com/googleapis/google-cloud-python/commit/53c951e90ad1d702fa507495532086d5d2f6b3c0)) + ## [1.11.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-datalabeling-v1.10.5...google-cloud-datalabeling-v1.11.0) (2024-10-24) diff --git a/packages/google-cloud-datalabeling/google/cloud/datalabeling/gapic_version.py 
b/packages/google-cloud-datalabeling/google/cloud/datalabeling/gapic_version.py index 50d842f376d0..b50cada0b7ee 100644 --- a/packages/google-cloud-datalabeling/google/cloud/datalabeling/gapic_version.py +++ b/packages/google-cloud-datalabeling/google/cloud/datalabeling/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.11.0" # {x-release-please-version} +__version__ = "1.11.1" # {x-release-please-version} diff --git a/packages/google-cloud-datalabeling/google/cloud/datalabeling_v1beta1/gapic_version.py b/packages/google-cloud-datalabeling/google/cloud/datalabeling_v1beta1/gapic_version.py index 50d842f376d0..b50cada0b7ee 100644 --- a/packages/google-cloud-datalabeling/google/cloud/datalabeling_v1beta1/gapic_version.py +++ b/packages/google-cloud-datalabeling/google/cloud/datalabeling_v1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.11.0" # {x-release-please-version} +__version__ = "1.11.1" # {x-release-please-version} diff --git a/packages/google-cloud-datalabeling/google/cloud/datalabeling_v1beta1/services/data_labeling_service/client.py b/packages/google-cloud-datalabeling/google/cloud/datalabeling_v1beta1/services/data_labeling_service/client.py index 1cd66859e9ed..ff8e5242b10e 100644 --- a/packages/google-cloud-datalabeling/google/cloud/datalabeling_v1beta1/services/data_labeling_service/client.py +++ b/packages/google-cloud-datalabeling/google/cloud/datalabeling_v1beta1/services/data_labeling_service/client.py @@ -621,36 +621,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = DataLabelingServiceClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. 
@@ -660,13 +630,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or DataLabelingServiceClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-datalabeling/samples/generated_samples/snippet_metadata_google.cloud.datalabeling.v1beta1.json b/packages/google-cloud-datalabeling/samples/generated_samples/snippet_metadata_google.cloud.datalabeling.v1beta1.json index f9b2f3ab241c..de5f79447163 100644 --- a/packages/google-cloud-datalabeling/samples/generated_samples/snippet_metadata_google.cloud.datalabeling.v1beta1.json +++ b/packages/google-cloud-datalabeling/samples/generated_samples/snippet_metadata_google.cloud.datalabeling.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-datalabeling", - "version": "1.11.0" + "version": "1.11.1" }, "snippets": [ { diff --git a/packages/google-cloud-datalabeling/tests/unit/gapic/datalabeling_v1beta1/test_data_labeling_service.py b/packages/google-cloud-datalabeling/tests/unit/gapic/datalabeling_v1beta1/test_data_labeling_service.py index 19c3ca7020d9..634bca8e777b 100644 --- a/packages/google-cloud-datalabeling/tests/unit/gapic/datalabeling_v1beta1/test_data_labeling_service.py +++ b/packages/google-cloud-datalabeling/tests/unit/gapic/datalabeling_v1beta1/test_data_labeling_service.py @@ -357,89 +357,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
-@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - ( - DataLabelingServiceClient, - transports.DataLabelingServiceGrpcTransport, - "grpc", - ), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. 
- client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
- ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-dataplex/CHANGELOG.md b/packages/google-cloud-dataplex/CHANGELOG.md index 204b9589e081..36d8806cd8cd 100644 --- a/packages/google-cloud-dataplex/CHANGELOG.md +++ b/packages/google-cloud-dataplex/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [2.3.1](https://github.com/googleapis/google-cloud-python/compare/google-cloud-dataplex-v2.3.0...google-cloud-dataplex-v2.3.1) (2024-10-31) + + +### Bug Fixes + +* disable universe-domain validation ([53c951e](https://github.com/googleapis/google-cloud-python/commit/53c951e90ad1d702fa507495532086d5d2f6b3c0)) + ## [2.3.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-dataplex-v2.2.2...google-cloud-dataplex-v2.3.0) (2024-10-24) diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex/gapic_version.py b/packages/google-cloud-dataplex/google/cloud/dataplex/gapic_version.py index 38b74a960221..dd79fdad215c 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex/gapic_version.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "2.3.0" # {x-release-please-version} +__version__ = "2.3.1" # {x-release-please-version} diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/gapic_version.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/gapic_version.py index 38b74a960221..dd79fdad215c 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/gapic_version.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.3.0" # {x-release-please-version} +__version__ = "2.3.1" # {x-release-please-version} diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/client.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/client.py index 953065ec8f72..6c5aeb8272cf 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/client.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/client.py @@ -547,36 +547,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. 
- """ - - default_universe = CatalogServiceClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -586,13 +556,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or CatalogServiceClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/client.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/client.py index 931da4dd7e72..5b521dda8a55 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/client.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/client.py @@ -492,36 +492,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. 
- credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = ContentServiceClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -531,13 +501,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or ContentServiceClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. 
+ return True @property def api_endpoint(self): diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_scan_service/client.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_scan_service/client.py index ab52839b7c22..03f256f07fdc 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_scan_service/client.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_scan_service/client.py @@ -530,36 +530,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = DataScanServiceClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -569,13 +539,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. 
""" - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or DataScanServiceClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_taxonomy_service/client.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_taxonomy_service/client.py index 560cbd8e541c..e42d1677cd1d 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_taxonomy_service/client.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_taxonomy_service/client.py @@ -520,36 +520,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = DataTaxonomyServiceClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." 
- ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -559,13 +529,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or DataTaxonomyServiceClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/dataplex_service/client.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/dataplex_service/client.py index 97a0c4ef7f95..5e488a12b5fa 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/dataplex_service/client.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/dataplex_service/client.py @@ -653,36 +653,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. 
- """ - - default_universe = DataplexServiceClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -692,13 +662,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or DataplexServiceClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/metadata_service/client.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/metadata_service/client.py index 932feff8afd7..8ada13d7743b 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/metadata_service/client.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/metadata_service/client.py @@ -525,36 +525,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. 
- credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = MetadataServiceClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -564,13 +534,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or MetadataServiceClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. 
+ return True @property def api_endpoint(self): diff --git a/packages/google-cloud-dataplex/samples/generated_samples/snippet_metadata_google.cloud.dataplex.v1.json b/packages/google-cloud-dataplex/samples/generated_samples/snippet_metadata_google.cloud.dataplex.v1.json index d441efaa45ce..e466410d1430 100644 --- a/packages/google-cloud-dataplex/samples/generated_samples/snippet_metadata_google.cloud.dataplex.v1.json +++ b/packages/google-cloud-dataplex/samples/generated_samples/snippet_metadata_google.cloud.dataplex.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-dataplex", - "version": "2.3.0" + "version": "2.3.1" }, "snippets": [ { diff --git a/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_catalog_service.py b/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_catalog_service.py index 6d11066066e2..6198538a8234 100644 --- a/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_catalog_service.py +++ b/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_catalog_service.py @@ -331,85 +331,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (CatalogServiceClient, transports.CatalogServiceGrpcTransport, "grpc"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. 
- channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_content_service.py b/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_content_service.py index 29fa13129c99..3f618ec57c6c 100644 --- a/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_content_service.py +++ b/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_content_service.py @@ -323,85 +323,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (ContentServiceClient, transports.ContentServiceGrpcTransport, "grpc"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. 
- assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. 
- # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_data_scan_service.py b/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_data_scan_service.py index b7e0f9a54d5f..ada3a77c02bd 100644 --- a/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_data_scan_service.py +++ b/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_data_scan_service.py @@ -337,85 +337,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
-@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (DataScanServiceClient, transports.DataScanServiceGrpcTransport, "grpc"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. 
- client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
- ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_data_taxonomy_service.py b/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_data_taxonomy_service.py index 899af08114ae..cbe00d789c16 100644 --- a/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_data_taxonomy_service.py +++ b/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_data_taxonomy_service.py @@ -347,89 +347,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - ( - DataTaxonomyServiceClient, - transports.DataTaxonomyServiceGrpcTransport, - "grpc", - ), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. 
- channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_dataplex_service.py b/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_dataplex_service.py index 2e3867d1ded6..4f51ecd23937 100644 --- a/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_dataplex_service.py +++ b/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_dataplex_service.py @@ -331,85 +331,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (DataplexServiceClient, transports.DataplexServiceGrpcTransport, "grpc"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. 
- assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. 
- # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_metadata_service.py b/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_metadata_service.py index 47cb77d2d6c1..7c4bfd0cb98b 100644 --- a/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_metadata_service.py +++ b/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_metadata_service.py @@ -319,85 +319,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
-@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (MetadataServiceClient, transports.MetadataServiceGrpcTransport, "grpc"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. 
- client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
- ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-dataproc-metastore/CHANGELOG.md b/packages/google-cloud-dataproc-metastore/CHANGELOG.md index da03744ff54c..3b8f38deb616 100644 --- a/packages/google-cloud-dataproc-metastore/CHANGELOG.md +++ b/packages/google-cloud-dataproc-metastore/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [1.16.1](https://github.com/googleapis/google-cloud-python/compare/google-cloud-dataproc-metastore-v1.16.0...google-cloud-dataproc-metastore-v1.16.1) (2024-11-11) + + +### Bug Fixes + +* disable universe-domain validation ([#13242](https://github.com/googleapis/google-cloud-python/issues/13242)) ([b479ff8](https://github.com/googleapis/google-cloud-python/commit/b479ff841ed93a18393a188ee1d72edf9fb729ec)) + ## [1.16.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-dataproc-metastore-v1.15.5...google-cloud-dataproc-metastore-v1.16.0) (2024-10-24) diff --git a/packages/google-cloud-dataproc-metastore/google/cloud/metastore/gapic_version.py b/packages/google-cloud-dataproc-metastore/google/cloud/metastore/gapic_version.py index 3e0ea3b28f0a..b6e92d4eebd5 100644 --- a/packages/google-cloud-dataproc-metastore/google/cloud/metastore/gapic_version.py +++ b/packages/google-cloud-dataproc-metastore/google/cloud/metastore/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.16.0" # {x-release-please-version} +__version__ = "1.16.1" # {x-release-please-version} diff --git a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1/gapic_version.py b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1/gapic_version.py index 3e0ea3b28f0a..b6e92d4eebd5 100644 --- a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1/gapic_version.py +++ b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.16.0" # {x-release-please-version} +__version__ = "1.16.1" # {x-release-please-version} diff --git a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1/services/dataproc_metastore/client.py b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1/services/dataproc_metastore/client.py index 209ae0b88b2b..ebd4b4426d95 100644 --- a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1/services/dataproc_metastore/client.py +++ b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1/services/dataproc_metastore/client.py @@ -582,36 +582,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. 
- """ - - default_universe = DataprocMetastoreClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -621,13 +591,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or DataprocMetastoreClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1/services/dataproc_metastore_federation/client.py b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1/services/dataproc_metastore_federation/client.py index 117d755e2ecb..009727cac4ea 100644 --- a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1/services/dataproc_metastore_federation/client.py +++ b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1/services/dataproc_metastore_federation/client.py @@ -494,36 +494,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. 
- - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = DataprocMetastoreFederationClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -533,13 +503,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or DataprocMetastoreFederationClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. 
+ return True @property def api_endpoint(self): diff --git a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1alpha/gapic_version.py b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1alpha/gapic_version.py index 3e0ea3b28f0a..b6e92d4eebd5 100644 --- a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1alpha/gapic_version.py +++ b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1alpha/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.16.0" # {x-release-please-version} +__version__ = "1.16.1" # {x-release-please-version} diff --git a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1alpha/services/dataproc_metastore/client.py b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1alpha/services/dataproc_metastore/client.py index 1ff79928f905..3c34a1d2c38b 100644 --- a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1alpha/services/dataproc_metastore/client.py +++ b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1alpha/services/dataproc_metastore/client.py @@ -604,36 +604,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. 
- """ - - default_universe = DataprocMetastoreClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -643,13 +613,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or DataprocMetastoreClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1alpha/services/dataproc_metastore_federation/client.py b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1alpha/services/dataproc_metastore_federation/client.py index a7f9e069e5e2..4b0074b0859e 100644 --- a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1alpha/services/dataproc_metastore_federation/client.py +++ b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1alpha/services/dataproc_metastore_federation/client.py @@ -494,36 +494,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. 
- - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = DataprocMetastoreFederationClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -533,13 +503,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or DataprocMetastoreFederationClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1beta/gapic_version.py b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1beta/gapic_version.py index 3e0ea3b28f0a..b6e92d4eebd5 100644 --- a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1beta/gapic_version.py +++ b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1beta/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.16.0" # {x-release-please-version} +__version__ = "1.16.1" # {x-release-please-version} diff --git a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1beta/services/dataproc_metastore/client.py b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1beta/services/dataproc_metastore/client.py index a3aaca4ba2cf..22edac199a78 100644 --- a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1beta/services/dataproc_metastore/client.py +++ b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1beta/services/dataproc_metastore/client.py @@ -604,36 +604,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = DataprocMetastoreClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. 
@@ -643,13 +613,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or DataprocMetastoreClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1beta/services/dataproc_metastore_federation/client.py b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1beta/services/dataproc_metastore_federation/client.py index 95af3d42f8b3..431267b0231b 100644 --- a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1beta/services/dataproc_metastore_federation/client.py +++ b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1beta/services/dataproc_metastore_federation/client.py @@ -494,36 +494,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. 
- """ - - default_universe = DataprocMetastoreFederationClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -533,13 +503,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or DataprocMetastoreFederationClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. 
+ return True @property def api_endpoint(self): diff --git a/packages/google-cloud-dataproc-metastore/samples/generated_samples/snippet_metadata_google.cloud.metastore.v1.json b/packages/google-cloud-dataproc-metastore/samples/generated_samples/snippet_metadata_google.cloud.metastore.v1.json index d5c26c7c4b09..580020d8d539 100644 --- a/packages/google-cloud-dataproc-metastore/samples/generated_samples/snippet_metadata_google.cloud.metastore.v1.json +++ b/packages/google-cloud-dataproc-metastore/samples/generated_samples/snippet_metadata_google.cloud.metastore.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-dataproc-metastore", - "version": "1.16.0" + "version": "1.16.1" }, "snippets": [ { diff --git a/packages/google-cloud-dataproc-metastore/samples/generated_samples/snippet_metadata_google.cloud.metastore.v1alpha.json b/packages/google-cloud-dataproc-metastore/samples/generated_samples/snippet_metadata_google.cloud.metastore.v1alpha.json index 03c4008d8a13..1d12ff4531b9 100644 --- a/packages/google-cloud-dataproc-metastore/samples/generated_samples/snippet_metadata_google.cloud.metastore.v1alpha.json +++ b/packages/google-cloud-dataproc-metastore/samples/generated_samples/snippet_metadata_google.cloud.metastore.v1alpha.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-dataproc-metastore", - "version": "1.16.0" + "version": "1.16.1" }, "snippets": [ { diff --git a/packages/google-cloud-dataproc-metastore/samples/generated_samples/snippet_metadata_google.cloud.metastore.v1beta.json b/packages/google-cloud-dataproc-metastore/samples/generated_samples/snippet_metadata_google.cloud.metastore.v1beta.json index 01386fe3d45d..827a424e1525 100644 --- a/packages/google-cloud-dataproc-metastore/samples/generated_samples/snippet_metadata_google.cloud.metastore.v1beta.json +++ b/packages/google-cloud-dataproc-metastore/samples/generated_samples/snippet_metadata_google.cloud.metastore.v1beta.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", 
"name": "google-cloud-dataproc-metastore", - "version": "1.16.0" + "version": "1.16.1" }, "snippets": [ { diff --git a/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1/test_dataproc_metastore.py b/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1/test_dataproc_metastore.py index 668c83d037bb..db49e903f5d0 100644 --- a/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1/test_dataproc_metastore.py +++ b/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1/test_dataproc_metastore.py @@ -346,86 +346,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (DataprocMetastoreClient, transports.DataprocMetastoreGrpcTransport, "grpc"), - (DataprocMetastoreClient, transports.DataprocMetastoreRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. 
- channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1/test_dataproc_metastore_federation.py b/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1/test_dataproc_metastore_federation.py index 166b06e7ac62..3d90019f0578 100644 --- a/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1/test_dataproc_metastore_federation.py +++ b/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1/test_dataproc_metastore_federation.py @@ -360,94 +360,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
-@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - ( - DataprocMetastoreFederationClient, - transports.DataprocMetastoreFederationGrpcTransport, - "grpc", - ), - ( - DataprocMetastoreFederationClient, - transports.DataprocMetastoreFederationRestTransport, - "rest", - ), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. 
- google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
- ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1alpha/test_dataproc_metastore.py b/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1alpha/test_dataproc_metastore.py index f6429bd6e71c..ba518bed0b64 100644 --- a/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1alpha/test_dataproc_metastore.py +++ b/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1alpha/test_dataproc_metastore.py @@ -346,86 +346,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (DataprocMetastoreClient, transports.DataprocMetastoreGrpcTransport, "grpc"), - (DataprocMetastoreClient, transports.DataprocMetastoreRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. 
Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1alpha/test_dataproc_metastore_federation.py b/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1alpha/test_dataproc_metastore_federation.py index c77e5fd9ecd9..3addd60c352b 100644 --- a/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1alpha/test_dataproc_metastore_federation.py +++ b/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1alpha/test_dataproc_metastore_federation.py @@ -360,94 +360,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
-@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - ( - DataprocMetastoreFederationClient, - transports.DataprocMetastoreFederationGrpcTransport, - "grpc", - ), - ( - DataprocMetastoreFederationClient, - transports.DataprocMetastoreFederationRestTransport, - "rest", - ), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. 
- google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
- ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1beta/test_dataproc_metastore.py b/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1beta/test_dataproc_metastore.py index ca7b1c9f773f..a27fb9c8d80e 100644 --- a/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1beta/test_dataproc_metastore.py +++ b/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1beta/test_dataproc_metastore.py @@ -346,86 +346,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (DataprocMetastoreClient, transports.DataprocMetastoreGrpcTransport, "grpc"), - (DataprocMetastoreClient, transports.DataprocMetastoreRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. 
Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1beta/test_dataproc_metastore_federation.py b/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1beta/test_dataproc_metastore_federation.py index d06fb20f9c02..a7e8afa7c066 100644 --- a/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1beta/test_dataproc_metastore_federation.py +++ b/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1beta/test_dataproc_metastore_federation.py @@ -360,94 +360,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
-@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - ( - DataprocMetastoreFederationClient, - transports.DataprocMetastoreFederationGrpcTransport, - "grpc", - ), - ( - DataprocMetastoreFederationClient, - transports.DataprocMetastoreFederationRestTransport, - "rest", - ), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. 
- google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
- ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-dataproc/CHANGELOG.md b/packages/google-cloud-dataproc/CHANGELOG.md index 8e0835c419cf..ca882977e9c1 100644 --- a/packages/google-cloud-dataproc/CHANGELOG.md +++ b/packages/google-cloud-dataproc/CHANGELOG.md @@ -4,6 +4,13 @@ [1]: https://pypi.org/project/google-cloud-dataproc/#history +## [5.15.1](https://github.com/googleapis/google-cloud-python/compare/google-cloud-dataproc-v5.15.0...google-cloud-dataproc-v5.15.1) (2024-10-31) + + +### Bug Fixes + +* disable universe-domain validation ([53c951e](https://github.com/googleapis/google-cloud-python/commit/53c951e90ad1d702fa507495532086d5d2f6b3c0)) + ## [5.15.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-dataproc-v5.14.0...google-cloud-dataproc-v5.15.0) (2024-10-24) diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc/gapic_version.py b/packages/google-cloud-dataproc/google/cloud/dataproc/gapic_version.py index d3fb1e059462..2f51a6ead321 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc/gapic_version.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "5.15.0" # {x-release-please-version} +__version__ = "5.15.1" # {x-release-please-version} diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/gapic_version.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/gapic_version.py index d3fb1e059462..2f51a6ead321 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/gapic_version.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "5.15.0" # {x-release-please-version} +__version__ = "5.15.1" # {x-release-please-version} diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/autoscaling_policy_service/client.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/autoscaling_policy_service/client.py index cf8936541353..102389445efc 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/autoscaling_policy_service/client.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/autoscaling_policy_service/client.py @@ -469,36 +469,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. 
- """ - - default_universe = AutoscalingPolicyServiceClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -508,13 +478,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or AutoscalingPolicyServiceClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/batch_controller/client.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/batch_controller/client.py index 06b9494a2bb0..2ad7f3e9a226 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/batch_controller/client.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/batch_controller/client.py @@ -492,36 +492,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. 
- credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = BatchControllerClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -531,13 +501,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or BatchControllerClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. 
+ return True @property def api_endpoint(self): diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/cluster_controller/client.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/cluster_controller/client.py index f3ec1e754250..72c70fec8614 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/cluster_controller/client.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/cluster_controller/client.py @@ -541,36 +541,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = ClusterControllerClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -580,13 +550,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. 
""" - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or ClusterControllerClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/job_controller/client.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/job_controller/client.py index 9e80a67a3462..8883a6b45141 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/job_controller/client.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/job_controller/client.py @@ -443,36 +443,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = JobControllerClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." 
- ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -482,13 +452,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or JobControllerClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/node_group_controller/client.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/node_group_controller/client.py index 5a09ceff3b7a..131f8090a81c 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/node_group_controller/client.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/node_group_controller/client.py @@ -470,36 +470,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. 
- """ - - default_universe = NodeGroupControllerClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -509,13 +479,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or NodeGroupControllerClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/session_controller/client.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/session_controller/client.py index 4cf598a22ef2..2c7196b6b50f 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/session_controller/client.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/session_controller/client.py @@ -514,36 +514,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. 
- credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = SessionControllerClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -553,13 +523,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or SessionControllerClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. 
+ return True @property def api_endpoint(self): diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/session_template_controller/client.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/session_template_controller/client.py index 5c7bb34e4c62..48408f697fc2 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/session_template_controller/client.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/session_template_controller/client.py @@ -492,36 +492,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = SessionTemplateControllerClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -531,13 +501,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. 
""" - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or SessionTemplateControllerClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/workflow_template_service/client.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/workflow_template_service/client.py index 61d8feb58149..7101f26dfaeb 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/workflow_template_service/client.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/workflow_template_service/client.py @@ -544,36 +544,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = WorkflowTemplateServiceClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). 
" - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -583,13 +553,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or WorkflowTemplateServiceClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-dataproc/samples/generated_samples/snippet_metadata_google.cloud.dataproc.v1.json b/packages/google-cloud-dataproc/samples/generated_samples/snippet_metadata_google.cloud.dataproc.v1.json index ff1df21529fc..f8466cf79500 100644 --- a/packages/google-cloud-dataproc/samples/generated_samples/snippet_metadata_google.cloud.dataproc.v1.json +++ b/packages/google-cloud-dataproc/samples/generated_samples/snippet_metadata_google.cloud.dataproc.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-dataproc", - "version": "5.15.0" + "version": "5.15.1" }, "snippets": [ { diff --git a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_autoscaling_policy_service.py b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_autoscaling_policy_service.py index 2087042eedb6..f4c91d0398a0 100644 --- a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_autoscaling_policy_service.py +++ b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_autoscaling_policy_service.py @@ -340,94 +340,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
-@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - ( - AutoscalingPolicyServiceClient, - transports.AutoscalingPolicyServiceGrpcTransport, - "grpc", - ), - ( - AutoscalingPolicyServiceClient, - transports.AutoscalingPolicyServiceRestTransport, - "rest", - ), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. 
- google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
- ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_batch_controller.py b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_batch_controller.py index 8d900d427aee..a673847575e7 100644 --- a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_batch_controller.py +++ b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_batch_controller.py @@ -333,86 +333,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (BatchControllerClient, transports.BatchControllerGrpcTransport, "grpc"), - (BatchControllerClient, transports.BatchControllerRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. 
- channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_cluster_controller.py b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_cluster_controller.py index 45b17a9df888..3593271f8a30 100644 --- a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_cluster_controller.py +++ b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_cluster_controller.py @@ -345,86 +345,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
-@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (ClusterControllerClient, transports.ClusterControllerGrpcTransport, "grpc"), - (ClusterControllerClient, transports.ClusterControllerRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. 
- google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
- ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_job_controller.py b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_job_controller.py index 037ade1da6bc..1bb3f821daf7 100644 --- a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_job_controller.py +++ b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_job_controller.py @@ -329,86 +329,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (JobControllerClient, transports.JobControllerGrpcTransport, "grpc"), - (JobControllerClient, transports.JobControllerRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. 
- channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_node_group_controller.py b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_node_group_controller.py index 5004a273b763..fe3d749bb3c0 100644 --- a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_node_group_controller.py +++ b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_node_group_controller.py @@ -346,94 +346,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
-@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - ( - NodeGroupControllerClient, - transports.NodeGroupControllerGrpcTransport, - "grpc", - ), - ( - NodeGroupControllerClient, - transports.NodeGroupControllerRestTransport, - "rest", - ), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. 
- google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
- ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_session_controller.py b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_session_controller.py index 7c6ab36a253e..1fbded365565 100644 --- a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_session_controller.py +++ b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_session_controller.py @@ -342,86 +342,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (SessionControllerClient, transports.SessionControllerGrpcTransport, "grpc"), - (SessionControllerClient, transports.SessionControllerRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. 
- channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_session_template_controller.py b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_session_template_controller.py index ff8b8c23cbcf..51e8a7cfcd54 100644 --- a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_session_template_controller.py +++ b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_session_template_controller.py @@ -345,94 +345,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
-@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - ( - SessionTemplateControllerClient, - transports.SessionTemplateControllerGrpcTransport, - "grpc", - ), - ( - SessionTemplateControllerClient, - transports.SessionTemplateControllerRestTransport, - "rest", - ), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. 
- google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
- ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_workflow_template_service.py b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_workflow_template_service.py index bb1918825cc8..b738e59cf64a 100644 --- a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_workflow_template_service.py +++ b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_workflow_template_service.py @@ -351,94 +351,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - ( - WorkflowTemplateServiceClient, - transports.WorkflowTemplateServiceGrpcTransport, - "grpc", - ), - ( - WorkflowTemplateServiceClient, - transports.WorkflowTemplateServiceRestTransport, - "rest", - ), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. 
Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-datastream/CHANGELOG.md b/packages/google-cloud-datastream/CHANGELOG.md index 214844bee18a..c667a26e014f 100644 --- a/packages/google-cloud-datastream/CHANGELOG.md +++ b/packages/google-cloud-datastream/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [1.10.1](https://github.com/googleapis/google-cloud-python/compare/google-cloud-datastream-v1.10.0...google-cloud-datastream-v1.10.1) (2024-10-31) + + +### Bug Fixes + +* disable universe-domain validation ([53c951e](https://github.com/googleapis/google-cloud-python/commit/53c951e90ad1d702fa507495532086d5d2f6b3c0)) + ## [1.10.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-datastream-v1.9.5...google-cloud-datastream-v1.10.0) (2024-10-24) diff --git a/packages/google-cloud-datastream/google/cloud/datastream/gapic_version.py b/packages/google-cloud-datastream/google/cloud/datastream/gapic_version.py index d1d2a9e60a97..f1d827b5c728 100644 --- 
a/packages/google-cloud-datastream/google/cloud/datastream/gapic_version.py +++ b/packages/google-cloud-datastream/google/cloud/datastream/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.10.0" # {x-release-please-version} +__version__ = "1.10.1" # {x-release-please-version} diff --git a/packages/google-cloud-datastream/google/cloud/datastream_v1/gapic_version.py b/packages/google-cloud-datastream/google/cloud/datastream_v1/gapic_version.py index d1d2a9e60a97..f1d827b5c728 100644 --- a/packages/google-cloud-datastream/google/cloud/datastream_v1/gapic_version.py +++ b/packages/google-cloud-datastream/google/cloud/datastream_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.10.0" # {x-release-please-version} +__version__ = "1.10.1" # {x-release-please-version} diff --git a/packages/google-cloud-datastream/google/cloud/datastream_v1/services/datastream/client.py b/packages/google-cloud-datastream/google/cloud/datastream_v1/services/datastream/client.py index 853071962115..77e8053b60a5 100644 --- a/packages/google-cloud-datastream/google/cloud/datastream_v1/services/datastream/client.py +++ b/packages/google-cloud-datastream/google/cloud/datastream_v1/services/datastream/client.py @@ -580,36 +580,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. 
- - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = DatastreamClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -619,13 +589,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or DatastreamClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-datastream/google/cloud/datastream_v1alpha1/gapic_version.py b/packages/google-cloud-datastream/google/cloud/datastream_v1alpha1/gapic_version.py index d1d2a9e60a97..f1d827b5c728 100644 --- a/packages/google-cloud-datastream/google/cloud/datastream_v1alpha1/gapic_version.py +++ b/packages/google-cloud-datastream/google/cloud/datastream_v1alpha1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.10.0" # {x-release-please-version} +__version__ = "1.10.1" # {x-release-please-version} diff --git a/packages/google-cloud-datastream/google/cloud/datastream_v1alpha1/services/datastream/client.py b/packages/google-cloud-datastream/google/cloud/datastream_v1alpha1/services/datastream/client.py index ce57716e9865..8dbe4ed4fee1 100644 --- a/packages/google-cloud-datastream/google/cloud/datastream_v1alpha1/services/datastream/client.py +++ b/packages/google-cloud-datastream/google/cloud/datastream_v1alpha1/services/datastream/client.py @@ -533,36 +533,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = DatastreamClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -572,13 +542,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. 
""" - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or DatastreamClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-datastream/samples/generated_samples/snippet_metadata_google.cloud.datastream.v1.json b/packages/google-cloud-datastream/samples/generated_samples/snippet_metadata_google.cloud.datastream.v1.json index 2a6eeafecdcd..e51156316e2e 100644 --- a/packages/google-cloud-datastream/samples/generated_samples/snippet_metadata_google.cloud.datastream.v1.json +++ b/packages/google-cloud-datastream/samples/generated_samples/snippet_metadata_google.cloud.datastream.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-datastream", - "version": "1.10.0" + "version": "1.10.1" }, "snippets": [ { diff --git a/packages/google-cloud-datastream/samples/generated_samples/snippet_metadata_google.cloud.datastream.v1alpha1.json b/packages/google-cloud-datastream/samples/generated_samples/snippet_metadata_google.cloud.datastream.v1alpha1.json index 03f40cfcd6e8..e83ae9de83b6 100644 --- a/packages/google-cloud-datastream/samples/generated_samples/snippet_metadata_google.cloud.datastream.v1alpha1.json +++ b/packages/google-cloud-datastream/samples/generated_samples/snippet_metadata_google.cloud.datastream.v1alpha1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-datastream", - "version": "1.10.0" + "version": "1.10.1" }, "snippets": [ { diff --git a/packages/google-cloud-datastream/tests/unit/gapic/datastream_v1/test_datastream.py b/packages/google-cloud-datastream/tests/unit/gapic/datastream_v1/test_datastream.py index 20c3591b1848..600b3f7f4da1 100644 --- a/packages/google-cloud-datastream/tests/unit/gapic/datastream_v1/test_datastream.py +++ 
b/packages/google-cloud-datastream/tests/unit/gapic/datastream_v1/test_datastream.py @@ -313,86 +313,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (DatastreamClient, transports.DatastreamGrpcTransport, "grpc"), - (DatastreamClient, transports.DatastreamRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. 
- google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
- ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-datastream/tests/unit/gapic/datastream_v1alpha1/test_datastream.py b/packages/google-cloud-datastream/tests/unit/gapic/datastream_v1alpha1/test_datastream.py index 7f7967c89b17..ff611a29f85d 100644 --- a/packages/google-cloud-datastream/tests/unit/gapic/datastream_v1alpha1/test_datastream.py +++ b/packages/google-cloud-datastream/tests/unit/gapic/datastream_v1alpha1/test_datastream.py @@ -309,86 +309,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (DatastreamClient, transports.DatastreamGrpcTransport, "grpc"), - (DatastreamClient, transports.DatastreamRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. 
- channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-deploy/CHANGELOG.md b/packages/google-cloud-deploy/CHANGELOG.md index 74c3e8e5d684..c621fef6afee 100644 --- a/packages/google-cloud-deploy/CHANGELOG.md +++ b/packages/google-cloud-deploy/CHANGELOG.md @@ -1,5 +1,18 @@ # Changelog +## [2.3.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-deploy-v2.2.0...google-cloud-deploy-v2.3.0) (2024-10-31) + + +### Features + +* added new fields for the Automation Repair rule ([5bad720](https://github.com/googleapis/google-cloud-python/commit/5bad72013c2ad2727bdf3628454437e2047b2c9b)) +* added route destination related fields to Gateway service mesh message ([5bad720](https://github.com/googleapis/google-cloud-python/commit/5bad72013c2ad2727bdf3628454437e2047b2c9b)) + + +### Bug Fixes + +* disable universe-domain validation ([53c951e](https://github.com/googleapis/google-cloud-python/commit/53c951e90ad1d702fa507495532086d5d2f6b3c0)) + ## 
[2.2.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-deploy-v2.1.0...google-cloud-deploy-v2.2.0) (2024-10-24) diff --git a/packages/google-cloud-deploy/google/cloud/deploy/__init__.py b/packages/google-cloud-deploy/google/cloud/deploy/__init__.py index 4b051017ed31..e4cc21aeab87 100644 --- a/packages/google-cloud-deploy/google/cloud/deploy/__init__.py +++ b/packages/google-cloud-deploy/google/cloud/deploy/__init__.py @@ -36,6 +36,7 @@ AnthosCluster, ApproveRolloutRequest, ApproveRolloutResponse, + AssociatedEntities, Automation, AutomationResourceSelector, AutomationRolloutMetadata, @@ -144,13 +145,16 @@ Release, RenderMetadata, RepairPhase, + RepairPhaseConfig, RepairRolloutOperation, RepairRolloutRule, RepairState, + Retry, RetryAttempt, RetryJobRequest, RetryJobResponse, RetryPhase, + Rollback, RollbackAttempt, RollbackTargetConfig, RollbackTargetRequest, @@ -226,6 +230,7 @@ "AnthosCluster", "ApproveRolloutRequest", "ApproveRolloutResponse", + "AssociatedEntities", "Automation", "AutomationResourceSelector", "AutomationRolloutMetadata", @@ -333,12 +338,15 @@ "Release", "RenderMetadata", "RepairPhase", + "RepairPhaseConfig", "RepairRolloutOperation", "RepairRolloutRule", + "Retry", "RetryAttempt", "RetryJobRequest", "RetryJobResponse", "RetryPhase", + "Rollback", "RollbackAttempt", "RollbackTargetConfig", "RollbackTargetRequest", diff --git a/packages/google-cloud-deploy/google/cloud/deploy/gapic_version.py b/packages/google-cloud-deploy/google/cloud/deploy/gapic_version.py index 04f353585881..38b74a960221 100644 --- a/packages/google-cloud-deploy/google/cloud/deploy/gapic_version.py +++ b/packages/google-cloud-deploy/google/cloud/deploy/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "2.2.0" # {x-release-please-version} +__version__ = "2.3.0" # {x-release-please-version} diff --git a/packages/google-cloud-deploy/google/cloud/deploy_v1/__init__.py b/packages/google-cloud-deploy/google/cloud/deploy_v1/__init__.py index c9c4ecd71f25..1c2b034046fb 100644 --- a/packages/google-cloud-deploy/google/cloud/deploy_v1/__init__.py +++ b/packages/google-cloud-deploy/google/cloud/deploy_v1/__init__.py @@ -33,6 +33,7 @@ AnthosCluster, ApproveRolloutRequest, ApproveRolloutResponse, + AssociatedEntities, Automation, AutomationResourceSelector, AutomationRolloutMetadata, @@ -141,13 +142,16 @@ Release, RenderMetadata, RepairPhase, + RepairPhaseConfig, RepairRolloutOperation, RepairRolloutRule, RepairState, + Retry, RetryAttempt, RetryJobRequest, RetryJobResponse, RetryPhase, + Rollback, RollbackAttempt, RollbackTargetConfig, RollbackTargetRequest, @@ -208,6 +212,7 @@ "AnthosCluster", "ApproveRolloutRequest", "ApproveRolloutResponse", + "AssociatedEntities", "Automation", "AutomationEvent", "AutomationResourceSelector", @@ -326,13 +331,16 @@ "ReleaseRenderEvent", "RenderMetadata", "RepairPhase", + "RepairPhaseConfig", "RepairRolloutOperation", "RepairRolloutRule", "RepairState", + "Retry", "RetryAttempt", "RetryJobRequest", "RetryJobResponse", "RetryPhase", + "Rollback", "RollbackAttempt", "RollbackTargetConfig", "RollbackTargetRequest", diff --git a/packages/google-cloud-deploy/google/cloud/deploy_v1/gapic_version.py b/packages/google-cloud-deploy/google/cloud/deploy_v1/gapic_version.py index 04f353585881..38b74a960221 100644 --- a/packages/google-cloud-deploy/google/cloud/deploy_v1/gapic_version.py +++ b/packages/google-cloud-deploy/google/cloud/deploy_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "2.2.0" # {x-release-please-version} +__version__ = "2.3.0" # {x-release-please-version} diff --git a/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/client.py b/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/client.py index 0f24eaa59285..011e05bafe54 100644 --- a/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/client.py +++ b/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/client.py @@ -843,36 +843,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = CloudDeployClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -882,13 +852,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. 
""" - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or CloudDeployClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-deploy/google/cloud/deploy_v1/types/__init__.py b/packages/google-cloud-deploy/google/cloud/deploy_v1/types/__init__.py index 7017500e27c7..baf15d3a4001 100644 --- a/packages/google-cloud-deploy/google/cloud/deploy_v1/types/__init__.py +++ b/packages/google-cloud-deploy/google/cloud/deploy_v1/types/__init__.py @@ -27,6 +27,7 @@ AnthosCluster, ApproveRolloutRequest, ApproveRolloutResponse, + AssociatedEntities, Automation, AutomationResourceSelector, AutomationRolloutMetadata, @@ -135,13 +136,16 @@ Release, RenderMetadata, RepairPhase, + RepairPhaseConfig, RepairRolloutOperation, RepairRolloutRule, RepairState, + Retry, RetryAttempt, RetryJobRequest, RetryJobResponse, RetryPhase, + Rollback, RollbackAttempt, RollbackTargetConfig, RollbackTargetRequest, @@ -199,6 +203,7 @@ "AnthosCluster", "ApproveRolloutRequest", "ApproveRolloutResponse", + "AssociatedEntities", "Automation", "AutomationResourceSelector", "AutomationRolloutMetadata", @@ -306,12 +311,15 @@ "Release", "RenderMetadata", "RepairPhase", + "RepairPhaseConfig", "RepairRolloutOperation", "RepairRolloutRule", + "Retry", "RetryAttempt", "RetryJobRequest", "RetryJobResponse", "RetryPhase", + "Rollback", "RollbackAttempt", "RollbackTargetConfig", "RollbackTargetRequest", diff --git a/packages/google-cloud-deploy/google/cloud/deploy_v1/types/cloud_deploy.py b/packages/google-cloud-deploy/google/cloud/deploy_v1/types/cloud_deploy.py index 7eed27b03034..d84d4966c326 100644 --- a/packages/google-cloud-deploy/google/cloud/deploy_v1/types/cloud_deploy.py +++ b/packages/google-cloud-deploy/google/cloud/deploy_v1/types/cloud_deploy.py @@ -67,6 
+67,7 @@ "CloudRunLocation", "MultiTarget", "CustomTarget", + "AssociatedEntities", "ListTargetsRequest", "ListTargetsResponse", "GetTargetRequest", @@ -164,6 +165,9 @@ "PromoteReleaseRule", "AdvanceRolloutRule", "RepairRolloutRule", + "RepairPhaseConfig", + "Retry", + "Rollback", "AutomationRuleCondition", "CreateAutomationRequest", "UpdateAutomationRequest", @@ -783,8 +787,48 @@ class GatewayServiceMesh(proto.Message): Pods for the Deployment and Service resources. This label must already be present in both resources. + route_destinations (google.cloud.deploy_v1.types.KubernetesConfig.GatewayServiceMesh.RouteDestinations): + Optional. Route destinations allow + configuring the Gateway API HTTPRoute to be + deployed to additional clusters. This option is + available for multi-cluster service mesh set ups + that require the route to exist in the clusters + that call the service. If unspecified, the + HTTPRoute will only be deployed to the Target + cluster. """ + class RouteDestinations(proto.Message): + r"""Information about route destinations for the Gateway API + service mesh. + + Attributes: + destination_ids (MutableSequence[str]): + Required. The clusters where the Gateway API + HTTPRoute resource will be deployed to. Valid + entries include the associated entities IDs + configured in the Target resource and "@self" to + include the Target cluster. + propagate_service (bool): + Optional. Whether to propagate the Kubernetes + Service to the route destination clusters. The + Service will always be deployed to the Target + cluster even if the HTTPRoute is not. This + option may be used to facilitiate successful DNS + lookup in the route destination clusters. Can + only be set to true if destinations are + specified. 
+ """ + + destination_ids: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + propagate_service: bool = proto.Field( + proto.BOOL, + number=2, + ) + http_route: str = proto.Field( proto.STRING, number=1, @@ -811,6 +855,13 @@ class GatewayServiceMesh(proto.Message): proto.STRING, number=6, ) + route_destinations: "KubernetesConfig.GatewayServiceMesh.RouteDestinations" = ( + proto.Field( + proto.MESSAGE, + number=8, + message="KubernetesConfig.GatewayServiceMesh.RouteDestinations", + ) + ) class ServiceNetworking(proto.Message): r"""Information about the Kubernetes Service networking @@ -1540,6 +1591,17 @@ class Target(proto.Message): Target. This field is a member of `oneof`_ ``deployment_target``. + associated_entities (MutableMapping[str, google.cloud.deploy_v1.types.AssociatedEntities]): + Optional. Map of entity IDs to their associated entities. + Associated entities allows specifying places other than the + deployment target for specific features. For example, the + Gateway API canary can be configured to deploy the HTTPRoute + to a different cluster(s) than the deployment cluster using + associated entities. An entity ID must consist of lower-case + letters, numbers, and hyphens, start with a letter and end + with a letter or a number, and have a max length of 63 + characters. In other words, it must match the following + regex: ``^[a-z]([a-z0-9-]{0,61}[a-z0-9])?$``. etag (str): Optional. 
This checksum is computed by the server based on the value of other fields, and @@ -1630,6 +1692,12 @@ class Target(proto.Message): oneof="deployment_target", message="CustomTarget", ) + associated_entities: MutableMapping[str, "AssociatedEntities"] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=23, + message="AssociatedEntities", + ) etag: str = proto.Field( proto.STRING, number=12, @@ -1923,6 +1991,30 @@ class CustomTarget(proto.Message): ) +class AssociatedEntities(proto.Message): + r"""Information about entities associated with a ``Target``. + + Attributes: + gke_clusters (MutableSequence[google.cloud.deploy_v1.types.GkeCluster]): + Optional. Information specifying GKE clusters + as associated entities. + anthos_clusters (MutableSequence[google.cloud.deploy_v1.types.AnthosCluster]): + Optional. Information specifying Anthos + clusters as associated entities. + """ + + gke_clusters: MutableSequence["GkeCluster"] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="GkeCluster", + ) + anthos_clusters: MutableSequence["AnthosCluster"] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message="AnthosCluster", + ) + + class ListTargetsRequest(proto.Message): r"""The request object for ``ListTargets``. @@ -4403,6 +4495,9 @@ class Rollout(proto.Message): rolled_back_by_rollouts (MutableSequence[str]): Output only. Names of ``Rollouts`` that rolled back this ``Rollout``. + active_repair_automation_run (str): + Output only. The AutomationRun actively + repairing the rollout. """ class ApprovalState(proto.Enum): @@ -4607,6 +4702,10 @@ class FailureCause(proto.Enum): proto.STRING, number=27, ) + active_repair_automation_run: str = proto.Field( + proto.STRING, + number=28, + ) class Metadata(proto.Message): @@ -6547,6 +6646,15 @@ class RepairRolloutRule(proto.Message): Required. ID of the rule. This id must be unique in the ``Automation`` resource to which this rule belongs. The format is ``[a-z]([a-z0-9-]{0,61}[a-z0-9])?``. 
+ phases (MutableSequence[str]): + Optional. Phases within which jobs are subject to automatic + repair actions on failure. Proceeds only after phase name + matched any one in the list, or for all phases if + unspecified. This value must consist of lower-case letters, + numbers, and hyphens, start with a letter and end with a + letter or a number, and have a max length of 63 characters. + In other words, it must match the following regex: + ``^[a-z]([a-z0-9-]{0,61}[a-z0-9])?$``. jobs (MutableSequence[str]): Optional. Jobs to repair. Proceeds only after job name matched any one in the list, or for all jobs if unspecified @@ -6559,12 +6667,19 @@ class RepairRolloutRule(proto.Message): condition (google.cloud.deploy_v1.types.AutomationRuleCondition): Output only. Information around the state of the 'Automation' rule. + repair_phases (MutableSequence[google.cloud.deploy_v1.types.RepairPhaseConfig]): + Required. Defines the types of automatic + repair phases for failed jobs. """ id: str = proto.Field( proto.STRING, number=1, ) + phases: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=7, + ) jobs: MutableSequence[str] = proto.RepeatedField( proto.STRING, number=3, @@ -6574,6 +6689,102 @@ class RepairRolloutRule(proto.Message): number=6, message="AutomationRuleCondition", ) + repair_phases: MutableSequence["RepairPhaseConfig"] = proto.RepeatedField( + proto.MESSAGE, + number=8, + message="RepairPhaseConfig", + ) + + +class RepairPhaseConfig(proto.Message): + r"""Configuration of the repair phase. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + retry (google.cloud.deploy_v1.types.Retry): + Optional. Retries a failed job. 
+ + This field is a member of `oneof`_ ``repair_phase``. + rollback (google.cloud.deploy_v1.types.Rollback): + Optional. Rolls back a ``Rollout``. + + This field is a member of `oneof`_ ``repair_phase``. + """ + + retry: "Retry" = proto.Field( + proto.MESSAGE, + number=1, + oneof="repair_phase", + message="Retry", + ) + rollback: "Rollback" = proto.Field( + proto.MESSAGE, + number=2, + oneof="repair_phase", + message="Rollback", + ) + + +class Retry(proto.Message): + r"""Retries the failed job. + + Attributes: + attempts (int): + Required. Total number of retries. Retry is + skipped if set to 0; The minimum value is 1, and + the maximum value is 10. + wait (google.protobuf.duration_pb2.Duration): + Optional. How long to wait for the first + retry. Default is 0, and the maximum value is + 14d. + backoff_mode (google.cloud.deploy_v1.types.BackoffMode): + Optional. The pattern of how wait time will be increased. + Default is linear. Backoff mode will be ignored if ``wait`` + is 0. + """ + + attempts: int = proto.Field( + proto.INT64, + number=1, + ) + wait: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=2, + message=duration_pb2.Duration, + ) + backoff_mode: "BackoffMode" = proto.Field( + proto.ENUM, + number=3, + enum="BackoffMode", + ) + + +class Rollback(proto.Message): + r"""Rolls back a ``Rollout``. + + Attributes: + destination_phase (str): + Optional. The starting phase ID for the ``Rollout``. If + unspecified, the ``Rollout`` will start in the stable phase. + disable_rollback_if_rollout_pending (bool): + Optional. If pending rollout exists on the + target, the rollback operation will be aborted. + """ + + destination_phase: str = proto.Field( + proto.STRING, + number=1, + ) + disable_rollback_if_rollout_pending: bool = proto.Field( + proto.BOOL, + number=2, + ) class AutomationRuleCondition(proto.Message): @@ -7158,6 +7369,9 @@ class RepairRolloutOperation(proto.Message): rollout (str): Output only. 
The name of the rollout that initiates the ``AutomationRun``. + current_repair_phase_index (int): + Output only. The index of the current repair + action in the repair sequence. repair_phases (MutableSequence[google.cloud.deploy_v1.types.RepairPhase]): Output only. Records of the repair attempts. Each repair phase may have multiple retry @@ -7174,6 +7388,10 @@ class RepairRolloutOperation(proto.Message): proto.STRING, number=1, ) + current_repair_phase_index: int = proto.Field( + proto.INT64, + number=6, + ) repair_phases: MutableSequence["RepairPhase"] = proto.RepeatedField( proto.MESSAGE, number=3, @@ -7312,6 +7530,9 @@ class RollbackAttempt(proto.Message): state_desc (str): Output only. Description of the state of the Rollback. + disable_rollback_if_rollout_pending (bool): + Output only. If active rollout exists on the + target, abort this rollback. """ destination_phase: str = proto.Field( @@ -7331,6 +7552,10 @@ class RollbackAttempt(proto.Message): proto.STRING, number=4, ) + disable_rollback_if_rollout_pending: bool = proto.Field( + proto.BOOL, + number=5, + ) class ListAutomationRunsRequest(proto.Message): diff --git a/packages/google-cloud-deploy/samples/generated_samples/snippet_metadata_google.cloud.deploy.v1.json b/packages/google-cloud-deploy/samples/generated_samples/snippet_metadata_google.cloud.deploy.v1.json index c85274a3d0f2..8be081b739aa 100644 --- a/packages/google-cloud-deploy/samples/generated_samples/snippet_metadata_google.cloud.deploy.v1.json +++ b/packages/google-cloud-deploy/samples/generated_samples/snippet_metadata_google.cloud.deploy.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-deploy", - "version": "2.2.0" + "version": "2.3.0" }, "snippets": [ { diff --git a/packages/google-cloud-deploy/tests/unit/gapic/deploy_v1/test_cloud_deploy.py b/packages/google-cloud-deploy/tests/unit/gapic/deploy_v1/test_cloud_deploy.py index d8dc87560e05..3eac3f76854a 100644 --- 
a/packages/google-cloud-deploy/tests/unit/gapic/deploy_v1/test_cloud_deploy.py +++ b/packages/google-cloud-deploy/tests/unit/gapic/deploy_v1/test_cloud_deploy.py @@ -320,86 +320,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (CloudDeployClient, transports.CloudDeployGrpcTransport, "grpc"), - (CloudDeployClient, transports.CloudDeployRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. 
- google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
- ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ @@ -12395,6 +12315,7 @@ def test_get_rollout(request_type, transport: str = "grpc"): controller_rollout="controller_rollout_value", rollback_of_rollout="rollback_of_rollout_value", rolled_back_by_rollouts=["rolled_back_by_rollouts_value"], + active_repair_automation_run="active_repair_automation_run_value", ) response = client.get_rollout(request) @@ -12422,6 +12343,7 @@ def test_get_rollout(request_type, transport: str = "grpc"): assert response.controller_rollout == "controller_rollout_value" assert response.rollback_of_rollout == "rollback_of_rollout_value" assert response.rolled_back_by_rollouts == ["rolled_back_by_rollouts_value"] + assert response.active_repair_automation_run == "active_repair_automation_run_value" def test_get_rollout_non_empty_request_with_auto_populated_field(): @@ -12560,6 +12482,7 @@ async def test_get_rollout_async( controller_rollout="controller_rollout_value", rollback_of_rollout="rollback_of_rollout_value", rolled_back_by_rollouts=["rolled_back_by_rollouts_value"], + active_repair_automation_run="active_repair_automation_run_value", ) ) response = await client.get_rollout(request) @@ -12588,6 +12511,7 @@ async def test_get_rollout_async( assert response.controller_rollout == "controller_rollout_value" assert response.rollback_of_rollout == "rollback_of_rollout_value" assert response.rolled_back_by_rollouts == ["rolled_back_by_rollouts_value"] + assert response.active_repair_automation_run == "active_repair_automation_run_value" @pytest.mark.asyncio @@ -29824,6 +29748,7 @@ async def test_get_rollout_empty_call_grpc_asyncio(): controller_rollout="controller_rollout_value", rollback_of_rollout="rollback_of_rollout_value", rolled_back_by_rollouts=["rolled_back_by_rollouts_value"], + 
active_repair_automation_run="active_repair_automation_run_value", ) ) await client.get_rollout(request=None) @@ -30597,6 +30522,13 @@ def test_create_delivery_pipeline_rest_call_success(request_type): }, "stable_cutback_duration": {}, "pod_selector_label": "pod_selector_label_value", + "route_destinations": { + "destination_ids": [ + "destination_ids_value1", + "destination_ids_value2", + ], + "propagate_service": True, + }, }, "service_networking": { "service": "service_value", @@ -30891,6 +30823,13 @@ def test_update_delivery_pipeline_rest_call_success(request_type): }, "stable_cutback_duration": {}, "pod_selector_label": "pod_selector_label_value", + "route_destinations": { + "destination_ids": [ + "destination_ids_value1", + "destination_ids_value2", + ], + "propagate_service": True, + }, }, "service_networking": { "service": "service_value", @@ -31645,6 +31584,7 @@ def test_create_target_rest_call_success(request_type): "run": {"location": "location_value"}, "multi_target": {"target_ids": ["target_ids_value1", "target_ids_value2"]}, "custom_target": {"custom_target_type": "custom_target_type_value"}, + "associated_entities": {}, "etag": "etag_value", "execution_configs": [ { @@ -31870,6 +31810,7 @@ def test_update_target_rest_call_success(request_type): "run": {"location": "location_value"}, "multi_target": {"target_ids": ["target_ids_value1", "target_ids_value2"]}, "custom_target": {"custom_target_type": "custom_target_type_value"}, + "associated_entities": {}, "etag": "etag_value", "execution_configs": [ { @@ -33319,6 +33260,13 @@ def test_create_release_rest_call_success(request_type): }, "stable_cutback_duration": {}, "pod_selector_label": "pod_selector_label_value", + "route_destinations": { + "destination_ids": [ + "destination_ids_value1", + "destination_ids_value2", + ], + "propagate_service": True, + }, }, "service_networking": { "service": "service_value", @@ -33412,6 +33360,7 @@ def test_create_release_rest_call_success(request_type): "target_ids": 
["target_ids_value1", "target_ids_value2"] }, "custom_target": {"custom_target_type": "custom_target_type_value"}, + "associated_entities": {}, "etag": "etag_value", "execution_configs": [ { @@ -35130,6 +35079,7 @@ def test_get_rollout_rest_call_success(request_type): controller_rollout="controller_rollout_value", rollback_of_rollout="rollback_of_rollout_value", rolled_back_by_rollouts=["rolled_back_by_rollouts_value"], + active_repair_automation_run="active_repair_automation_run_value", ) # Wrap the value into a proper Response obj @@ -35161,6 +35111,7 @@ def test_get_rollout_rest_call_success(request_type): assert response.controller_rollout == "controller_rollout_value" assert response.rollback_of_rollout == "rollback_of_rollout_value" assert response.rolled_back_by_rollouts == ["rolled_back_by_rollouts_value"] + assert response.active_repair_automation_run == "active_repair_automation_run_value" @pytest.mark.parametrize("null_interceptor", [True, False]) @@ -35335,6 +35286,7 @@ def test_create_rollout_rest_call_success(request_type): "rolled_back_by_rollouts_value1", "rolled_back_by_rollouts_value2", ], + "active_repair_automation_run": "active_repair_automation_run_value", } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency @@ -36289,8 +36241,18 @@ def test_create_automation_rest_call_success(request_type): }, "repair_rollout_rule": { "id": "id_value", + "phases": ["phases_value1", "phases_value2"], "jobs": ["jobs_value1", "jobs_value2"], "condition": {}, + "repair_phases": [ + { + "retry": {"attempts": 882, "wait": {}, "backoff_mode": 1}, + "rollback": { + "destination_phase": "destination_phase_value", + "disable_rollback_if_rollout_pending": True, + }, + } + ], }, } ], @@ -36523,8 +36485,18 @@ def test_update_automation_rest_call_success(request_type): }, "repair_rollout_rule": { "id": "id_value", + "phases": ["phases_value1", "phases_value2"], "jobs": ["jobs_value1", "jobs_value2"], "condition": {}, + "repair_phases": [ + { + "retry": {"attempts": 882, "wait": {}, "backoff_mode": 1}, + "rollback": { + "destination_phase": "destination_phase_value", + "disable_rollback_if_rollout_pending": True, + }, + } + ], }, } ], diff --git a/packages/google-cloud-developerconnect/CHANGELOG.md b/packages/google-cloud-developerconnect/CHANGELOG.md index d55ffcd1edf3..0b9124561b18 100644 --- a/packages/google-cloud-developerconnect/CHANGELOG.md +++ b/packages/google-cloud-developerconnect/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [0.1.4](https://github.com/googleapis/google-cloud-python/compare/google-cloud-developerconnect-v0.1.3...google-cloud-developerconnect-v0.1.4) (2024-10-31) + + +### Bug Fixes + +* disable universe-domain validation ([53c951e](https://github.com/googleapis/google-cloud-python/commit/53c951e90ad1d702fa507495532086d5d2f6b3c0)) + ## [0.1.3](https://github.com/googleapis/google-cloud-python/compare/google-cloud-developerconnect-v0.1.2...google-cloud-developerconnect-v0.1.3) (2024-10-24) diff --git a/packages/google-cloud-developerconnect/google/cloud/developerconnect/gapic_version.py 
b/packages/google-cloud-developerconnect/google/cloud/developerconnect/gapic_version.py index 114e40645800..937ede8823ef 100644 --- a/packages/google-cloud-developerconnect/google/cloud/developerconnect/gapic_version.py +++ b/packages/google-cloud-developerconnect/google/cloud/developerconnect/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.3" # {x-release-please-version} +__version__ = "0.1.4" # {x-release-please-version} diff --git a/packages/google-cloud-developerconnect/google/cloud/developerconnect_v1/gapic_version.py b/packages/google-cloud-developerconnect/google/cloud/developerconnect_v1/gapic_version.py index 114e40645800..937ede8823ef 100644 --- a/packages/google-cloud-developerconnect/google/cloud/developerconnect_v1/gapic_version.py +++ b/packages/google-cloud-developerconnect/google/cloud/developerconnect_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.1.3" # {x-release-please-version} +__version__ = "0.1.4" # {x-release-please-version} diff --git a/packages/google-cloud-developerconnect/google/cloud/developerconnect_v1/services/developer_connect/client.py b/packages/google-cloud-developerconnect/google/cloud/developerconnect_v1/services/developer_connect/client.py index 844c446da73b..3fb61f46972c 100644 --- a/packages/google-cloud-developerconnect/google/cloud/developerconnect_v1/services/developer_connect/client.py +++ b/packages/google-cloud-developerconnect/google/cloud/developerconnect_v1/services/developer_connect/client.py @@ -517,36 +517,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = DeveloperConnectClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. 
@@ -556,13 +526,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or DeveloperConnectClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-developerconnect/samples/generated_samples/snippet_metadata_google.cloud.developerconnect.v1.json b/packages/google-cloud-developerconnect/samples/generated_samples/snippet_metadata_google.cloud.developerconnect.v1.json index 7f840565943c..020e6f209525 100644 --- a/packages/google-cloud-developerconnect/samples/generated_samples/snippet_metadata_google.cloud.developerconnect.v1.json +++ b/packages/google-cloud-developerconnect/samples/generated_samples/snippet_metadata_google.cloud.developerconnect.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-developerconnect", - "version": "0.1.3" + "version": "0.1.4" }, "snippets": [ { diff --git a/packages/google-cloud-developerconnect/tests/unit/gapic/developerconnect_v1/test_developer_connect.py b/packages/google-cloud-developerconnect/tests/unit/gapic/developerconnect_v1/test_developer_connect.py index f0b2007e96f0..c94e697b442d 100644 --- a/packages/google-cloud-developerconnect/tests/unit/gapic/developerconnect_v1/test_developer_connect.py +++ b/packages/google-cloud-developerconnect/tests/unit/gapic/developerconnect_v1/test_developer_connect.py @@ -333,86 +333,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
-@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (DeveloperConnectClient, transports.DeveloperConnectGrpcTransport, "grpc"), - (DeveloperConnectClient, transports.DeveloperConnectRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. 
- google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
- ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-dialogflow-cx/CHANGELOG.md b/packages/google-cloud-dialogflow-cx/CHANGELOG.md index 8d112b11f123..092f3d2f8b67 100644 --- a/packages/google-cloud-dialogflow-cx/CHANGELOG.md +++ b/packages/google-cloud-dialogflow-cx/CHANGELOG.md @@ -1,5 +1,18 @@ # Changelog + +## [1.37.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-dialogflow-cx-v1.36.0...google-cloud-dialogflow-cx-v1.37.0) (2024-11-11) + + +### Features + +* add options of client_certificate_settings, bigquery_export_settings, bearer_token_config and boost_control_spec; add support of ALAW encoding ([3e438ff](https://github.com/googleapis/google-cloud-python/commit/3e438ff52c5b3c436cf8e46d82c54b9caf77a658)) + + +### Bug Fixes + +* disable universe-domain validation ([#13242](https://github.com/googleapis/google-cloud-python/issues/13242)) ([b479ff8](https://github.com/googleapis/google-cloud-python/commit/b479ff841ed93a18393a188ee1d72edf9fb729ec)) + ## [1.36.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-dialogflow-cx-v1.35.0...google-cloud-dialogflow-cx-v1.36.0) (2024-10-24) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx/gapic_version.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx/gapic_version.py index ed461d017338..3026e41db675 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx/gapic_version.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the 
License. # -__version__ = "1.36.0" # {x-release-please-version} +__version__ = "1.37.0" # {x-release-please-version} diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/gapic_version.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/gapic_version.py index ed461d017338..3026e41db675 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/gapic_version.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.36.0" # {x-release-please-version} +__version__ = "1.37.0" # {x-release-please-version} diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/agents/async_client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/agents/async_client.py index aa90b2a34505..a35dc6b19fce 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/agents/async_client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/agents/async_client.py @@ -100,6 +100,8 @@ class AgentsAsyncClient: parse_flow_validation_result_path = staticmethod( AgentsClient.parse_flow_validation_result_path ) + secret_version_path = staticmethod(AgentsClient.secret_version_path) + parse_secret_version_path = staticmethod(AgentsClient.parse_secret_version_path) security_settings_path = staticmethod(AgentsClient.security_settings_path) parse_security_settings_path = staticmethod( AgentsClient.parse_security_settings_path @@ -329,7 +331,7 @@ async def sample_list_agents(): [Agents.ListAgents][google.cloud.dialogflow.cx.v3.Agents.ListAgents]. parent (:class:`str`): Required. The location to list all agents for. Format: - ``projects//locations/``. + ``projects//locations/``. 
This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -449,7 +451,7 @@ async def sample_get_agent(): [Agents.GetAgent][google.cloud.dialogflow.cx.v3.Agents.GetAgent]. name (:class:`str`): Required. The name of the agent. Format: - ``projects//locations//agents/``. + ``projects//locations//agents/``. This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -579,7 +581,7 @@ async def sample_create_agent(): [Agents.CreateAgent][google.cloud.dialogflow.cx.v3.Agents.CreateAgent]. parent (:class:`str`): Required. The location to create a agent for. Format: - ``projects//locations/``. + ``projects//locations/``. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -841,7 +843,7 @@ async def sample_delete_agent(): [Agents.DeleteAgent][google.cloud.dialogflow.cx.v3.Agents.DeleteAgent]. name (:class:`str`): Required. The name of the agent to delete. Format: - ``projects//locations//agents/``. + ``projects//locations//agents/``. This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -1259,7 +1261,7 @@ async def sample_get_agent_validation_result(): [Agents.GetAgentValidationResult][google.cloud.dialogflow.cx.v3.Agents.GetAgentValidationResult]. name (:class:`str`): Required. The agent name. Format: - ``projects//locations//agents//validationResult``. + ``projects//locations//agents//validationResult``. This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -1368,7 +1370,7 @@ async def sample_get_generative_settings(): RPC. name (:class:`str`): Required. Format: - ``projects//locations//agents//generativeSettings``. + ``projects//locations//agents//generativeSettings``. 
This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/agents/client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/agents/client.py index f6e0eecbfe74..6929327e6b45 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/agents/client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/agents/client.py @@ -335,6 +335,28 @@ def parse_flow_validation_result_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def secret_version_path( + project: str, + secret: str, + version: str, + ) -> str: + """Returns a fully-qualified secret_version string.""" + return "projects/{project}/secrets/{secret}/versions/{version}".format( + project=project, + secret=secret, + version=version, + ) + + @staticmethod + def parse_secret_version_path(path: str) -> Dict[str, str]: + """Parses a secret_version path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/secrets/(?P.+?)/versions/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def security_settings_path( project: str, @@ -613,36 +635,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. 
- """ - - default_universe = AgentsClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -652,13 +644,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or AgentsClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): @@ -871,7 +859,7 @@ def sample_list_agents(): [Agents.ListAgents][google.cloud.dialogflow.cx.v3.Agents.ListAgents]. parent (str): Required. The location to list all agents for. Format: - ``projects//locations/``. + ``projects//locations/``. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -988,7 +976,7 @@ def sample_get_agent(): [Agents.GetAgent][google.cloud.dialogflow.cx.v3.Agents.GetAgent]. name (str): Required. The name of the agent. Format: - ``projects//locations//agents/``. + ``projects//locations//agents/``. This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -1115,7 +1103,7 @@ def sample_create_agent(): [Agents.CreateAgent][google.cloud.dialogflow.cx.v3.Agents.CreateAgent]. parent (str): Required. The location to create a agent for. Format: - ``projects//locations/``. + ``projects//locations/``. 
This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -1371,7 +1359,7 @@ def sample_delete_agent(): [Agents.DeleteAgent][google.cloud.dialogflow.cx.v3.Agents.DeleteAgent]. name (str): Required. The name of the agent to delete. Format: - ``projects//locations//agents/``. + ``projects//locations//agents/``. This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -1780,7 +1768,7 @@ def sample_get_agent_validation_result(): [Agents.GetAgentValidationResult][google.cloud.dialogflow.cx.v3.Agents.GetAgentValidationResult]. name (str): Required. The agent name. Format: - ``projects//locations//agents//validationResult``. + ``projects//locations//agents//validationResult``. This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -1888,7 +1876,7 @@ def sample_get_generative_settings(): RPC. name (str): Required. Format: - ``projects//locations//agents//generativeSettings``. + ``projects//locations//agents//generativeSettings``. This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/changelogs/async_client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/changelogs/async_client.py index 030aa053b051..2612e71c001d 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/changelogs/async_client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/changelogs/async_client.py @@ -298,7 +298,7 @@ async def sample_list_changelogs(): [Changelogs.ListChangelogs][google.cloud.dialogflow.cx.v3.Changelogs.ListChangelogs]. parent (:class:`str`): Required. The agent containing the changelogs. Format: - ``projects//locations//agents/``. + ``projects//locations//agents/``. 
This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -418,7 +418,7 @@ async def sample_get_changelog(): [Changelogs.GetChangelog][google.cloud.dialogflow.cx.v3.Changelogs.GetChangelog]. name (:class:`str`): Required. The name of the changelog to get. Format: - ``projects//locations//agents//changelogs/``. + ``projects//locations//agents//changelogs/``. This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/changelogs/client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/changelogs/client.py index 7bbbe3e1c5c5..4d65010257ba 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/changelogs/client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/changelogs/client.py @@ -467,36 +467,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. 
- """ - - default_universe = ChangelogsClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -506,13 +476,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or ChangelogsClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): @@ -724,7 +690,7 @@ def sample_list_changelogs(): [Changelogs.ListChangelogs][google.cloud.dialogflow.cx.v3.Changelogs.ListChangelogs]. parent (str): Required. The agent containing the changelogs. Format: - ``projects//locations//agents/``. + ``projects//locations//agents/``. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -841,7 +807,7 @@ def sample_get_changelog(): [Changelogs.GetChangelog][google.cloud.dialogflow.cx.v3.Changelogs.GetChangelog]. name (str): Required. The name of the changelog to get. Format: - ``projects//locations//agents//changelogs/``. + ``projects//locations//agents//changelogs/``. 
This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/deployments/async_client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/deployments/async_client.py index fdf5142a5b13..8dc0705a2d33 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/deployments/async_client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/deployments/async_client.py @@ -311,7 +311,7 @@ async def sample_list_deployments(): Required. The [Environment][google.cloud.dialogflow.cx.v3.Environment] to list all environments for. Format: - ``projects//locations//agents//environments/``. + ``projects//locations//agents//environments/``. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -434,7 +434,7 @@ async def sample_get_deployment(): Required. The name of the [Deployment][google.cloud.dialogflow.cx.v3.Deployment]. Format: - ``projects//locations//agents//environments//deployments/``. + ``projects//locations//agents//environments//deployments/``. 
This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/deployments/client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/deployments/client.py index 61d1d2fc18b1..b886375f5165 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/deployments/client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/deployments/client.py @@ -547,36 +547,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = DeploymentsClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -586,13 +556,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. 
""" - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or DeploymentsClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): @@ -807,7 +773,7 @@ def sample_list_deployments(): Required. The [Environment][google.cloud.dialogflow.cx.v3.Environment] to list all environments for. Format: - ``projects//locations//agents//environments/``. + ``projects//locations//agents//environments/``. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -927,7 +893,7 @@ def sample_get_deployment(): Required. The name of the [Deployment][google.cloud.dialogflow.cx.v3.Deployment]. Format: - ``projects//locations//agents//environments//deployments/``. + ``projects//locations//agents//environments//deployments/``. This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/entity_types/async_client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/entity_types/async_client.py index bd301dfd4b8f..9ee04580cf0d 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/entity_types/async_client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/entity_types/async_client.py @@ -302,7 +302,7 @@ async def sample_get_entity_type(): [EntityTypes.GetEntityType][google.cloud.dialogflow.cx.v3.EntityTypes.GetEntityType]. name (:class:`str`): Required. The name of the entity type. Format: - ``projects//locations//agents//entityTypes/``. + ``projects//locations//agents//entityTypes/``. 
This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -450,7 +450,7 @@ async def sample_create_entity_type(): [EntityTypes.CreateEntityType][google.cloud.dialogflow.cx.v3.EntityTypes.CreateEntityType]. parent (:class:`str`): Required. The agent to create a entity type for. Format: - ``projects//locations//agents/``. + ``projects//locations//agents/``. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -752,7 +752,7 @@ async def sample_delete_entity_type(): [EntityTypes.DeleteEntityType][google.cloud.dialogflow.cx.v3.EntityTypes.DeleteEntityType]. name (:class:`str`): Required. The name of the entity type to delete. Format: - ``projects//locations//agents//entityTypes/``. + ``projects//locations//agents//entityTypes/``. This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -852,7 +852,7 @@ async def sample_list_entity_types(): parent (:class:`str`): Required. The agent to list all entity types for. Format: - ``projects//locations//agents/``. + ``projects//locations//agents/``. 
This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/entity_types/client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/entity_types/client.py index 5cc21cbc0f1d..21307be48ac0 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/entity_types/client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/entity_types/client.py @@ -470,36 +470,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = EntityTypesClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -509,13 +479,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. 
""" - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or EntityTypesClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): @@ -726,7 +692,7 @@ def sample_get_entity_type(): [EntityTypes.GetEntityType][google.cloud.dialogflow.cx.v3.EntityTypes.GetEntityType]. name (str): Required. The name of the entity type. Format: - ``projects//locations//agents//entityTypes/``. + ``projects//locations//agents//entityTypes/``. This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -871,7 +837,7 @@ def sample_create_entity_type(): [EntityTypes.CreateEntityType][google.cloud.dialogflow.cx.v3.EntityTypes.CreateEntityType]. parent (str): Required. The agent to create a entity type for. Format: - ``projects//locations//agents/``. + ``projects//locations//agents/``. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -1167,7 +1133,7 @@ def sample_delete_entity_type(): [EntityTypes.DeleteEntityType][google.cloud.dialogflow.cx.v3.EntityTypes.DeleteEntityType]. name (str): Required. The name of the entity type to delete. Format: - ``projects//locations//agents//entityTypes/``. + ``projects//locations//agents//entityTypes/``. This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -1264,7 +1230,7 @@ def sample_list_entity_types(): parent (str): Required. The agent to list all entity types for. Format: - ``projects//locations//agents/``. + ``projects//locations//agents/``. 
This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/environments/async_client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/environments/async_client.py index f02de8d96ce7..325d8eb6bc35 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/environments/async_client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/environments/async_client.py @@ -326,7 +326,7 @@ async def sample_list_environments(): Required. The [Agent][google.cloud.dialogflow.cx.v3.Agent] to list all environments for. Format: - ``projects//locations//agents/``. + ``projects//locations//agents/``. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -449,7 +449,7 @@ async def sample_get_environment(): Required. The name of the [Environment][google.cloud.dialogflow.cx.v3.Environment]. Format: - ``projects//locations//agents//environments/``. + ``projects//locations//agents//environments/``. This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -595,7 +595,7 @@ async def sample_create_environment(): an [Environment][google.cloud.dialogflow.cx.v3.Environment] for. Format: - ``projects//locations//agents/``. + ``projects//locations//agents/``. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -882,7 +882,7 @@ async def sample_delete_environment(): Required. The name of the [Environment][google.cloud.dialogflow.cx.v3.Environment] to delete. Format: - ``projects//locations//agents//environments/``. + ``projects//locations//agents//environments/``. 
This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -984,7 +984,7 @@ async def sample_lookup_environment_history(): name (:class:`str`): Required. Resource name of the environment to look up the history for. Format: - ``projects//locations//agents//environments/``. + ``projects//locations//agents//environments/``. This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -1219,7 +1219,7 @@ async def sample_list_continuous_test_results(): [Environments.ListContinuousTestResults][google.cloud.dialogflow.cx.v3.Environments.ListContinuousTestResults]. parent (:class:`str`): Required. The environment to list results for. Format: - ``projects//locations//agents// environments/``. + ``projects//locations//agents//environments/``. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/environments/client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/environments/client.py index 87cf02f634bb..560f86faa822 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/environments/client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/environments/client.py @@ -622,36 +622,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. 
- - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = EnvironmentsClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -661,13 +631,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or EnvironmentsClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): @@ -882,7 +848,7 @@ def sample_list_environments(): Required. The [Agent][google.cloud.dialogflow.cx.v3.Agent] to list all environments for. Format: - ``projects//locations//agents/``. + ``projects//locations//agents/``. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -1002,7 +968,7 @@ def sample_get_environment(): Required. The name of the [Environment][google.cloud.dialogflow.cx.v3.Environment]. Format: - ``projects//locations//agents//environments/``. + ``projects//locations//agents//environments/``. This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -1145,7 +1111,7 @@ def sample_create_environment(): an [Environment][google.cloud.dialogflow.cx.v3.Environment] for. Format: - ``projects//locations//agents/``. 
+ ``projects//locations//agents/``. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -1426,7 +1392,7 @@ def sample_delete_environment(): Required. The name of the [Environment][google.cloud.dialogflow.cx.v3.Environment] to delete. Format: - ``projects//locations//agents//environments/``. + ``projects//locations//agents//environments/``. This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -1525,7 +1491,7 @@ def sample_lookup_environment_history(): name (str): Required. Resource name of the environment to look up the history for. Format: - ``projects//locations//agents//environments/``. + ``projects//locations//agents//environments/``. This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -1757,7 +1723,7 @@ def sample_list_continuous_test_results(): [Environments.ListContinuousTestResults][google.cloud.dialogflow.cx.v3.Environments.ListContinuousTestResults]. parent (str): Required. The environment to list results for. Format: - ``projects//locations//agents// environments/``. + ``projects//locations//agents//environments/``. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/experiments/async_client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/experiments/async_client.py index 7a76c6b80273..b9925af68670 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/experiments/async_client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/experiments/async_client.py @@ -308,7 +308,7 @@ async def sample_list_experiments(): Required. The [Environment][google.cloud.dialogflow.cx.v3.Environment] to list all environments for. Format: - ``projects//locations//agents//environments/``. 
+ ``projects//locations//agents//environments/``. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -431,7 +431,7 @@ async def sample_get_experiment(): Required. The name of the [Environment][google.cloud.dialogflow.cx.v3.Environment]. Format: - ``projects//locations//agents//environments//experiments/``. + ``projects//locations//agents//environments//experiments/``. This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -549,7 +549,7 @@ async def sample_create_experiment(): an [Environment][google.cloud.dialogflow.cx.v3.Environment] for. Format: - ``projects//locations//agents//environments/``. + ``projects//locations//agents//environments/``. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -782,7 +782,7 @@ async def sample_delete_experiment(): Required. The name of the [Environment][google.cloud.dialogflow.cx.v3.Environment] to delete. Format: - ``projects//locations//agents//environments//experiments/``. + ``projects//locations//agents//environments//experiments/``. This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -882,7 +882,7 @@ async def sample_start_experiment(): name (:class:`str`): Required. Resource name of the experiment to start. Format: - ``projects//locations//agents//environments//experiments/``. + ``projects//locations//agents//environments//experiments/``. This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -991,7 +991,7 @@ async def sample_stop_experiment(): name (:class:`str`): Required. Resource name of the experiment to stop. Format: - ``projects//locations//agents//environments//experiments/``. + ``projects//locations//agents//environments//experiments/``. 
This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/experiments/client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/experiments/client.py index 5715d05dce50..d9fadc52810c 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/experiments/client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/experiments/client.py @@ -498,36 +498,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = ExperimentsClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -537,13 +507,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. 
""" - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or ExperimentsClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): @@ -758,7 +724,7 @@ def sample_list_experiments(): Required. The [Environment][google.cloud.dialogflow.cx.v3.Environment] to list all environments for. Format: - ``projects//locations//agents//environments/``. + ``projects//locations//agents//environments/``. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -878,7 +844,7 @@ def sample_get_experiment(): Required. The name of the [Environment][google.cloud.dialogflow.cx.v3.Environment]. Format: - ``projects//locations//agents//environments//experiments/``. + ``projects//locations//agents//environments//experiments/``. This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -993,7 +959,7 @@ def sample_create_experiment(): an [Environment][google.cloud.dialogflow.cx.v3.Environment] for. Format: - ``projects//locations//agents//environments/``. + ``projects//locations//agents//environments/``. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -1220,7 +1186,7 @@ def sample_delete_experiment(): Required. The name of the [Environment][google.cloud.dialogflow.cx.v3.Environment] to delete. Format: - ``projects//locations//agents//environments//experiments/``. + ``projects//locations//agents//environments//experiments/``. This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -1317,7 +1283,7 @@ def sample_start_experiment(): name (str): Required. Resource name of the experiment to start. Format: - ``projects//locations//agents//environments//experiments/``. 
+ ``projects//locations//agents//environments//experiments/``. This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -1423,7 +1389,7 @@ def sample_stop_experiment(): name (str): Required. Resource name of the experiment to stop. Format: - ``projects//locations//agents//environments//experiments/``. + ``projects//locations//agents//environments//experiments/``. This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/flows/async_client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/flows/async_client.py index 41bac1c15431..27d4abfd63f2 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/flows/async_client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/flows/async_client.py @@ -322,7 +322,7 @@ async def sample_create_flow(): [Flows.CreateFlow][google.cloud.dialogflow.cx.v3.Flows.CreateFlow]. parent (:class:`str`): Required. The agent to create a flow for. Format: - ``projects//locations//agents/``. + ``projects//locations//agents/``. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -453,7 +453,7 @@ async def sample_delete_flow(): [Flows.DeleteFlow][google.cloud.dialogflow.cx.v3.Flows.DeleteFlow]. name (:class:`str`): Required. The name of the flow to delete. Format: - ``projects//locations//agents//flows/``. + ``projects//locations//agents//flows/``. This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -551,7 +551,7 @@ async def sample_list_flows(): [Flows.ListFlows][google.cloud.dialogflow.cx.v3.Flows.ListFlows]. parent (:class:`str`): Required. The agent containing the flows. Format: - ``projects//locations//agents/``. + ``projects//locations//agents/``. 
This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -671,7 +671,7 @@ async def sample_get_flow(): [Flows.GetFlow][google.cloud.dialogflow.cx.v3.Flows.GetFlow]. name (:class:`str`): Required. The name of the flow to get. Format: - ``projects//locations//agents//flows/``. + ``projects//locations//agents//flows/``. This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -960,7 +960,7 @@ async def sample_train_flow(): [Flows.TrainFlow][google.cloud.dialogflow.cx.v3.Flows.TrainFlow]. name (:class:`str`): Required. The flow to train. Format: - ``projects//locations//agents//flows/``. + ``projects//locations//agents//flows/``. This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -1172,7 +1172,7 @@ async def sample_get_flow_validation_result(): [Flows.GetFlowValidationResult][google.cloud.dialogflow.cx.v3.Flows.GetFlowValidationResult]. name (:class:`str`): Required. The flow name. Format: - ``projects//locations//agents//flows//validationResult``. + ``projects//locations//agents//flows//validationResult``. 
This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/flows/client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/flows/client.py index 88cf18063bf3..2b9c5bf0eb42 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/flows/client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/flows/client.py @@ -597,36 +597,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = FlowsClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -636,13 +606,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. 
""" - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or FlowsClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): @@ -862,7 +828,7 @@ def sample_create_flow(): [Flows.CreateFlow][google.cloud.dialogflow.cx.v3.Flows.CreateFlow]. parent (str): Required. The agent to create a flow for. Format: - ``projects//locations//agents/``. + ``projects//locations//agents/``. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -990,7 +956,7 @@ def sample_delete_flow(): [Flows.DeleteFlow][google.cloud.dialogflow.cx.v3.Flows.DeleteFlow]. name (str): Required. The name of the flow to delete. Format: - ``projects//locations//agents//flows/``. + ``projects//locations//agents//flows/``. This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -1085,7 +1051,7 @@ def sample_list_flows(): [Flows.ListFlows][google.cloud.dialogflow.cx.v3.Flows.ListFlows]. parent (str): Required. The agent containing the flows. Format: - ``projects//locations//agents/``. + ``projects//locations//agents/``. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -1202,7 +1168,7 @@ def sample_get_flow(): [Flows.GetFlow][google.cloud.dialogflow.cx.v3.Flows.GetFlow]. name (str): Required. The name of the flow to get. Format: - ``projects//locations//agents//flows/``. + ``projects//locations//agents//flows/``. This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -1487,7 +1453,7 @@ def sample_train_flow(): [Flows.TrainFlow][google.cloud.dialogflow.cx.v3.Flows.TrainFlow]. name (str): Required. The flow to train. Format: - ``projects//locations//agents//flows/``. 
+ ``projects//locations//agents//flows/``. This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -1694,7 +1660,7 @@ def sample_get_flow_validation_result(): [Flows.GetFlowValidationResult][google.cloud.dialogflow.cx.v3.Flows.GetFlowValidationResult]. name (str): Required. The flow name. Format: - ``projects//locations//agents//flows//validationResult``. + ``projects//locations//agents//flows//validationResult``. This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/generators/async_client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/generators/async_client.py index bf28f8b28558..19505a87627b 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/generators/async_client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/generators/async_client.py @@ -300,7 +300,7 @@ async def sample_list_generators(): [Generators.ListGenerators][google.cloud.dialogflow.cx.v3.Generators.ListGenerators]. parent (:class:`str`): Required. The agent to list all generators for. Format: - ``projects//locations//agents/``. + ``projects//locations//agents/``. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -420,7 +420,7 @@ async def sample_get_generator(): [Generators.GetGenerator][google.cloud.dialogflow.cx.v3.Generators.GetGenerator]. name (:class:`str`): Required. The name of the generator. Format: - ``projects//locations//agents//generators/``. + ``projects//locations//agents//generators/``. This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -536,7 +536,7 @@ async def sample_create_generator(): [Generators.CreateGenerator][google.cloud.dialogflow.cx.v3.Generators.CreateGenerator]. 
parent (:class:`str`): Required. The agent to create a generator for. Format: - ``projects//locations//agents/``. + ``projects//locations//agents/``. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -775,7 +775,7 @@ async def sample_delete_generator(): [Generators.DeleteGenerator][google.cloud.dialogflow.cx.v3.Generators.DeleteGenerator]. name (:class:`str`): Required. The name of the generator to delete. Format: - ``projects//locations//agents//generators/``. + ``projects//locations//agents//generators/``. This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/generators/client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/generators/client.py index 802f56f74db2..33fc136f2598 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/generators/client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/generators/client.py @@ -468,36 +468,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. 
- """ - - default_universe = GeneratorsClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -507,13 +477,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or GeneratorsClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): @@ -726,7 +692,7 @@ def sample_list_generators(): [Generators.ListGenerators][google.cloud.dialogflow.cx.v3.Generators.ListGenerators]. parent (str): Required. The agent to list all generators for. Format: - ``projects//locations//agents/``. + ``projects//locations//agents/``. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -843,7 +809,7 @@ def sample_get_generator(): [Generators.GetGenerator][google.cloud.dialogflow.cx.v3.Generators.GetGenerator]. name (str): Required. The name of the generator. Format: - ``projects//locations//agents//generators/``. + ``projects//locations//agents//generators/``. This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -956,7 +922,7 @@ def sample_create_generator(): [Generators.CreateGenerator][google.cloud.dialogflow.cx.v3.Generators.CreateGenerator]. 
parent (str): Required. The agent to create a generator for. Format: - ``projects//locations//agents/``. + ``projects//locations//agents/``. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -1189,7 +1155,7 @@ def sample_delete_generator(): [Generators.DeleteGenerator][google.cloud.dialogflow.cx.v3.Generators.DeleteGenerator]. name (str): Required. The name of the generator to delete. Format: - ``projects//locations//agents//generators/``. + ``projects//locations//agents//generators/``. This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/intents/async_client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/intents/async_client.py index ec7831f84c39..776db47d1d05 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/intents/async_client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/intents/async_client.py @@ -302,7 +302,7 @@ async def sample_list_intents(): [Intents.ListIntents][google.cloud.dialogflow.cx.v3.Intents.ListIntents]. parent (:class:`str`): Required. The agent to list all intents for. Format: - ``projects//locations//agents/``. + ``projects//locations//agents/``. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -422,7 +422,7 @@ async def sample_get_intent(): [Intents.GetIntent][google.cloud.dialogflow.cx.v3.Intents.GetIntent]. name (:class:`str`): Required. The name of the intent. Format: - ``projects//locations//agents//intents/``. + ``projects//locations//agents//intents/``. This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -542,7 +542,7 @@ async def sample_create_intent(): [Intents.CreateIntent][google.cloud.dialogflow.cx.v3.Intents.CreateIntent]. 
parent (:class:`str`): Required. The agent to create an intent for. Format: - ``projects//locations//agents/``. + ``projects//locations//agents/``. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -790,7 +790,7 @@ async def sample_delete_intent(): [Intents.DeleteIntent][google.cloud.dialogflow.cx.v3.Intents.DeleteIntent]. name (:class:`str`): Required. The name of the intent to delete. Format: - ``projects//locations//agents//intents/``. + ``projects//locations//agents//intents/``. This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/intents/client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/intents/client.py index 0c1a624e75f3..9efea401b7a8 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/intents/client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/intents/client.py @@ -494,36 +494,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. 
- """ - - default_universe = IntentsClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -533,13 +503,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or IntentsClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): @@ -752,7 +718,7 @@ def sample_list_intents(): [Intents.ListIntents][google.cloud.dialogflow.cx.v3.Intents.ListIntents]. parent (str): Required. The agent to list all intents for. Format: - ``projects//locations//agents/``. + ``projects//locations//agents/``. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -869,7 +835,7 @@ def sample_get_intent(): [Intents.GetIntent][google.cloud.dialogflow.cx.v3.Intents.GetIntent]. name (str): Required. The name of the intent. Format: - ``projects//locations//agents//intents/``. + ``projects//locations//agents//intents/``. This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -986,7 +952,7 @@ def sample_create_intent(): [Intents.CreateIntent][google.cloud.dialogflow.cx.v3.Intents.CreateIntent]. parent (str): Required. The agent to create an intent for. 
Format: - ``projects//locations//agents/``. + ``projects//locations//agents/``. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -1228,7 +1194,7 @@ def sample_delete_intent(): [Intents.DeleteIntent][google.cloud.dialogflow.cx.v3.Intents.DeleteIntent]. name (str): Required. The name of the intent to delete. Format: - ``projects//locations//agents//intents/``. + ``projects//locations//agents//intents/``. This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/pages/async_client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/pages/async_client.py index d59bfff8fb0b..0a35ed3294f6 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/pages/async_client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/pages/async_client.py @@ -306,7 +306,7 @@ async def sample_list_pages(): [Pages.ListPages][google.cloud.dialogflow.cx.v3.Pages.ListPages]. parent (:class:`str`): Required. The flow to list all pages for. Format: - ``projects//locations//agents//flows/``. + ``projects//locations//agents//flows/``. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -426,7 +426,7 @@ async def sample_get_page(): [Pages.GetPage][google.cloud.dialogflow.cx.v3.Pages.GetPage]. name (:class:`str`): Required. The name of the page. Format: - ``projects//locations//agents//flows//pages/``. + ``projects//locations//agents//flows//pages/``. This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -559,7 +559,7 @@ async def sample_create_page(): [Pages.CreatePage][google.cloud.dialogflow.cx.v3.Pages.CreatePage]. parent (:class:`str`): Required. The flow to create a page for. 
Format: - ``projects//locations//agents//flows/``. + ``projects//locations//agents//flows/``. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -837,7 +837,7 @@ async def sample_delete_page(): [Pages.DeletePage][google.cloud.dialogflow.cx.v3.Pages.DeletePage]. name (:class:`str`): Required. The name of the page to delete. Format: - ``projects//locations//agents//Flows//pages/``. + ``projects//locations//agents//Flows//pages/``. This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/pages/client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/pages/client.py index 5226752450f4..215b7620b90b 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/pages/client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/pages/client.py @@ -591,36 +591,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. 
- """ - - default_universe = PagesClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -630,13 +600,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or PagesClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): @@ -848,7 +814,7 @@ def sample_list_pages(): [Pages.ListPages][google.cloud.dialogflow.cx.v3.Pages.ListPages]. parent (str): Required. The flow to list all pages for. Format: - ``projects//locations//agents//flows/``. + ``projects//locations//agents//flows/``. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -965,7 +931,7 @@ def sample_get_page(): [Pages.GetPage][google.cloud.dialogflow.cx.v3.Pages.GetPage]. name (str): Required. The name of the page. Format: - ``projects//locations//agents//flows//pages/``. + ``projects//locations//agents//flows//pages/``. This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -1097,7 +1063,7 @@ def sample_create_page(): [Pages.CreatePage][google.cloud.dialogflow.cx.v3.Pages.CreatePage]. parent (str): Required. The flow to create a page for. 
Format: - ``projects//locations//agents//flows/``. + ``projects//locations//agents//flows/``. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -1369,7 +1335,7 @@ def sample_delete_page(): [Pages.DeletePage][google.cloud.dialogflow.cx.v3.Pages.DeletePage]. name (str): Required. The name of the page to delete. Format: - ``projects//locations//agents//Flows//pages/``. + ``projects//locations//agents//Flows//pages/``. This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/security_settings_service/async_client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/security_settings_service/async_client.py index 381146b6dba2..844540653400 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/security_settings_service/async_client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/security_settings_service/async_client.py @@ -340,7 +340,7 @@ async def sample_create_security_settings(): Required. The location to create an [SecuritySettings][google.cloud.dialogflow.cx.v3.SecuritySettings] for. Format: - ``projects//locations/``. + ``projects//locations/``. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -465,7 +465,7 @@ async def sample_get_security_settings(): [SecuritySettingsService.GetSecuritySettings][google.cloud.dialogflow.cx.v3.SecuritySettingsService.GetSecuritySettings]. name (:class:`str`): Required. Resource name of the settings. Format: - ``projects//locations//securitySettings/``. + ``projects//locations//securitySettings/``. This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -712,7 +712,7 @@ async def sample_list_security_settings(): parent (:class:`str`): Required. 
The location to list all security settings for. Format: - ``projects//locations/``. + ``projects//locations/``. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -834,7 +834,7 @@ async def sample_delete_security_settings(): Required. The name of the [SecuritySettings][google.cloud.dialogflow.cx.v3.SecuritySettings] to delete. Format: - ``projects//locations//securitySettings/``. + ``projects//locations//securitySettings/``. This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/security_settings_service/client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/security_settings_service/client.py index 61a8d2a728f5..a76da48e7b7b 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/security_settings_service/client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/security_settings_service/client.py @@ -514,36 +514,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. 
- """ - - default_universe = SecuritySettingsServiceClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -553,13 +523,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or SecuritySettingsServiceClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): @@ -790,7 +756,7 @@ def sample_create_security_settings(): Required. The location to create an [SecuritySettings][google.cloud.dialogflow.cx.v3.SecuritySettings] for. Format: - ``projects//locations/``. + ``projects//locations/``. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -912,7 +878,7 @@ def sample_get_security_settings(): [SecuritySettingsService.GetSecuritySettings][google.cloud.dialogflow.cx.v3.SecuritySettingsService.GetSecuritySettings]. name (str): Required. Resource name of the settings. Format: - ``projects//locations//securitySettings/``. + ``projects//locations//securitySettings/``. This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -1153,7 +1119,7 @@ def sample_list_security_settings(): parent (str): Required. The location to list all security settings for. 
Format: - ``projects//locations/``. + ``projects//locations/``. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -1272,7 +1238,7 @@ def sample_delete_security_settings(): Required. The name of the [SecuritySettings][google.cloud.dialogflow.cx.v3.SecuritySettings] to delete. Format: - ``projects//locations//securitySettings/``. + ``projects//locations//securitySettings/``. This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/session_entity_types/async_client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/session_entity_types/async_client.py index f65ed87817da..a7f04fb18965 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/session_entity_types/async_client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/session_entity_types/async_client.py @@ -320,9 +320,9 @@ async def sample_list_session_entity_types(): parent (:class:`str`): Required. The session to list all session entity types from. Format: - ``projects//locations//agents//sessions/`` + ``projects//locations//agents//sessions/`` or - ``projects//locations//agents//environments//sessions/``. + ``projects//locations//agents//environments//sessions/``. If ``Environment ID`` is not specified, we assume default 'draft' environment. @@ -446,9 +446,9 @@ async def sample_get_session_entity_type(): [SessionEntityTypes.GetSessionEntityType][google.cloud.dialogflow.cx.v3.SessionEntityTypes.GetSessionEntityType]. name (:class:`str`): Required. The name of the session entity type. Format: - ``projects//locations//agents//sessions//entityTypes/`` + ``projects//locations//agents//sessions//entityTypes/`` or - ``projects//locations//agents//environments//sessions//entityTypes/``. 
+ ``projects//locations//agents//environments//sessions//entityTypes/``. If ``Environment ID`` is not specified, we assume default 'draft' environment. @@ -583,9 +583,9 @@ async def sample_create_session_entity_type(): parent (:class:`str`): Required. The session to create a session entity type for. Format: - ``projects//locations//agents//sessions/`` + ``projects//locations//agents//sessions/`` or - ``projects//locations//agents//environments//sessions/``. + ``projects//locations//agents//environments//sessions/``. If ``Environment ID`` is not specified, we assume default 'draft' environment. @@ -729,9 +729,9 @@ async def sample_update_session_entity_type(): [SessionEntityTypes.UpdateSessionEntityType][google.cloud.dialogflow.cx.v3.SessionEntityTypes.UpdateSessionEntityType]. session_entity_type (:class:`google.cloud.dialogflowcx_v3.types.SessionEntityType`): Required. The session entity type to update. Format: - ``projects//locations//agents//sessions//entityTypes/`` + ``projects//locations//agents//sessions//entityTypes/`` or - ``projects//locations//agents//environments//sessions//entityTypes/``. + ``projects//locations//agents//environments//sessions//entityTypes/``. If ``Environment ID`` is not specified, we assume default 'draft' environment. @@ -866,9 +866,9 @@ async def sample_delete_session_entity_type(): name (:class:`str`): Required. The name of the session entity type to delete. Format: - ``projects//locations//agents//sessions//entityTypes/`` + ``projects//locations//agents//sessions//entityTypes/`` or - ``projects//locations//agents//environments//sessions//entityTypes/``. + ``projects//locations//agents//environments//sessions//entityTypes/``. If ``Environment ID`` is not specified, we assume default 'draft' environment. 
diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/session_entity_types/client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/session_entity_types/client.py index a0bc8b177ff3..442cc41cd100 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/session_entity_types/client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/session_entity_types/client.py @@ -475,36 +475,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = SessionEntityTypesClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -514,13 +484,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. 
""" - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or SessionEntityTypesClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): @@ -744,9 +710,9 @@ def sample_list_session_entity_types(): parent (str): Required. The session to list all session entity types from. Format: - ``projects//locations//agents//sessions/`` + ``projects//locations//agents//sessions/`` or - ``projects//locations//agents//environments//sessions/``. + ``projects//locations//agents//environments//sessions/``. If ``Environment ID`` is not specified, we assume default 'draft' environment. @@ -869,9 +835,9 @@ def sample_get_session_entity_type(): [SessionEntityTypes.GetSessionEntityType][google.cloud.dialogflow.cx.v3.SessionEntityTypes.GetSessionEntityType]. name (str): Required. The name of the session entity type. Format: - ``projects//locations//agents//sessions//entityTypes/`` + ``projects//locations//agents//sessions//entityTypes/`` or - ``projects//locations//agents//environments//sessions//entityTypes/``. + ``projects//locations//agents//environments//sessions//entityTypes/``. If ``Environment ID`` is not specified, we assume default 'draft' environment. @@ -1003,9 +969,9 @@ def sample_create_session_entity_type(): parent (str): Required. The session to create a session entity type for. Format: - ``projects//locations//agents//sessions/`` + ``projects//locations//agents//sessions/`` or - ``projects//locations//agents//environments//sessions/``. + ``projects//locations//agents//environments//sessions/``. If ``Environment ID`` is not specified, we assume default 'draft' environment. @@ -1148,9 +1114,9 @@ def sample_update_session_entity_type(): [SessionEntityTypes.UpdateSessionEntityType][google.cloud.dialogflow.cx.v3.SessionEntityTypes.UpdateSessionEntityType]. 
session_entity_type (google.cloud.dialogflowcx_v3.types.SessionEntityType): Required. The session entity type to update. Format: - ``projects//locations//agents//sessions//entityTypes/`` + ``projects//locations//agents//sessions//entityTypes/`` or - ``projects//locations//agents//environments//sessions//entityTypes/``. + ``projects//locations//agents//environments//sessions//entityTypes/``. If ``Environment ID`` is not specified, we assume default 'draft' environment. @@ -1284,9 +1250,9 @@ def sample_delete_session_entity_type(): name (str): Required. The name of the session entity type to delete. Format: - ``projects//locations//agents//sessions//entityTypes/`` + ``projects//locations//agents//sessions//entityTypes/`` or - ``projects//locations//agents//environments//sessions//entityTypes/``. + ``projects//locations//agents//environments//sessions//entityTypes/``. If ``Environment ID`` is not specified, we assume default 'draft' environment. diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/sessions/client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/sessions/client.py index 8d6f63b89fa2..ffe2e5f977bc 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/sessions/client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/sessions/client.py @@ -691,36 +691,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. 
- - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = SessionsClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -730,13 +700,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or SessionsClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/test_cases/async_client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/test_cases/async_client.py index 797c3826db24..af8acba825d2 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/test_cases/async_client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/test_cases/async_client.py @@ -327,7 +327,7 @@ async def sample_list_test_cases(): [TestCases.ListTestCases][google.cloud.dialogflow.cx.v3.TestCases.ListTestCases]. parent (:class:`str`): Required. The agent to list all pages for. Format: - ``projects//locations//agents/``. + ``projects//locations//agents/``. 
This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -445,7 +445,7 @@ async def sample_batch_delete_test_cases(): [TestCases.BatchDeleteTestCases][google.cloud.dialogflow.cx.v3.TestCases.BatchDeleteTestCases]. parent (:class:`str`): Required. The agent to delete test cases from. Format: - ``projects//locations//agents/``. + ``projects//locations//agents/``. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -542,7 +542,7 @@ async def sample_get_test_case(): [TestCases.GetTestCase][google.cloud.dialogflow.cx.v3.TestCases.GetTestCase]. name (:class:`str`): Required. The name of the testcase. Format: - ``projects//locations//agents//testCases/``. + ``projects//locations//agents//testCases/``. This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -651,7 +651,7 @@ async def sample_create_test_case(): [TestCases.CreateTestCase][google.cloud.dialogflow.cx.v3.TestCases.CreateTestCase]. parent (:class:`str`): Required. The agent to create the test case for. Format: - ``projects//locations//agents/``. + ``projects//locations//agents/``. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -1411,7 +1411,7 @@ async def sample_list_test_case_results(): [TestCases.ListTestCaseResults][google.cloud.dialogflow.cx.v3.TestCases.ListTestCaseResults]. parent (:class:`str`): Required. The test case to list results for. Format: - ``projects//locations//agents// testCases/``. + ``projects//locations//agents//testCases/``. Specify a ``-`` as a wildcard for TestCase ID to list results across multiple test cases. @@ -1533,7 +1533,7 @@ async def sample_get_test_case_result(): [TestCases.GetTestCaseResult][google.cloud.dialogflow.cx.v3.TestCases.GetTestCaseResult]. name (:class:`str`): Required. The name of the testcase. Format: - ``projects//locations//agents//testCases//results/``. 
+ ``projects//locations//agents//testCases//results/``. This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/test_cases/client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/test_cases/client.py index 2233b06d8a3d..0b600f19c03d 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/test_cases/client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/test_cases/client.py @@ -692,36 +692,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = TestCasesClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. 
@@ -731,13 +701,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or TestCasesClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): @@ -949,7 +915,7 @@ def sample_list_test_cases(): [TestCases.ListTestCases][google.cloud.dialogflow.cx.v3.TestCases.ListTestCases]. parent (str): Required. The agent to list all pages for. Format: - ``projects//locations//agents/``. + ``projects//locations//agents/``. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -1064,7 +1030,7 @@ def sample_batch_delete_test_cases(): [TestCases.BatchDeleteTestCases][google.cloud.dialogflow.cx.v3.TestCases.BatchDeleteTestCases]. parent (str): Required. The agent to delete test cases from. Format: - ``projects//locations//agents/``. + ``projects//locations//agents/``. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -1158,7 +1124,7 @@ def sample_get_test_case(): [TestCases.GetTestCase][google.cloud.dialogflow.cx.v3.TestCases.GetTestCase]. name (str): Required. The name of the testcase. Format: - ``projects//locations//agents//testCases/``. + ``projects//locations//agents//testCases/``. This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -1264,7 +1230,7 @@ def sample_create_test_case(): [TestCases.CreateTestCase][google.cloud.dialogflow.cx.v3.TestCases.CreateTestCase]. parent (str): Required. The agent to create the test case for. Format: - ``projects//locations//agents/``. + ``projects//locations//agents/``. 
This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -2008,7 +1974,7 @@ def sample_list_test_case_results(): [TestCases.ListTestCaseResults][google.cloud.dialogflow.cx.v3.TestCases.ListTestCaseResults]. parent (str): Required. The test case to list results for. Format: - ``projects//locations//agents// testCases/``. + ``projects//locations//agents//testCases/``. Specify a ``-`` as a wildcard for TestCase ID to list results across multiple test cases. @@ -2127,7 +2093,7 @@ def sample_get_test_case_result(): [TestCases.GetTestCaseResult][google.cloud.dialogflow.cx.v3.TestCases.GetTestCaseResult]. name (str): Required. The name of the testcase. Format: - ``projects//locations//agents//testCases//results/``. + ``projects//locations//agents//testCases//results/``. This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/transition_route_groups/async_client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/transition_route_groups/async_client.py index e671918afa1a..2e873c7ba930 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/transition_route_groups/async_client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/transition_route_groups/async_client.py @@ -330,7 +330,7 @@ async def sample_list_transition_route_groups(): parent (:class:`str`): Required. The flow to list all transition route groups for. Format: - ``projects//locations//agents//flows/`` + ``projects//locations//agents//flows/`` or \`projects//locations//agents/. This corresponds to the ``parent`` field @@ -458,9 +458,9 @@ async def sample_get_transition_route_group(): Required. The name of the [TransitionRouteGroup][google.cloud.dialogflow.cx.v3.TransitionRouteGroup]. 
Format: - ``projects//locations//agents//flows//transitionRouteGroups/`` + ``projects//locations//agents//flows//transitionRouteGroups/`` or - ``projects//locations//agents//transitionRouteGroups/``. + ``projects//locations//agents//transitionRouteGroups/``. This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -587,9 +587,9 @@ async def sample_create_transition_route_group(): Required. The flow to create an [TransitionRouteGroup][google.cloud.dialogflow.cx.v3.TransitionRouteGroup] for. Format: - ``projects//locations//agents//flows/`` + ``projects//locations//agents//flows/`` or - ``projects//locations//agents/`` + ``projects//locations//agents/`` for agent-level groups. This corresponds to the ``parent`` field @@ -853,9 +853,9 @@ async def sample_delete_transition_route_group(): Required. The name of the [TransitionRouteGroup][google.cloud.dialogflow.cx.v3.TransitionRouteGroup] to delete. Format: - ``projects//locations//agents//flows//transitionRouteGroups/`` + ``projects//locations//agents//flows//transitionRouteGroups/`` or - ``projects//locations//agents//transitionRouteGroups/``. + ``projects//locations//agents//transitionRouteGroups/``. 
This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/transition_route_groups/client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/transition_route_groups/client.py index 2238c9cb230a..4845cc951238 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/transition_route_groups/client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/transition_route_groups/client.py @@ -575,36 +575,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = TransitionRouteGroupsClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. 
@@ -614,13 +584,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or TransitionRouteGroupsClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): @@ -844,7 +810,7 @@ def sample_list_transition_route_groups(): parent (str): Required. The flow to list all transition route groups for. Format: - ``projects//locations//agents//flows/`` + ``projects//locations//agents//flows/`` or \`projects//locations//agents/. This corresponds to the ``parent`` field @@ -971,9 +937,9 @@ def sample_get_transition_route_group(): Required. The name of the [TransitionRouteGroup][google.cloud.dialogflow.cx.v3.TransitionRouteGroup]. Format: - ``projects//locations//agents//flows//transitionRouteGroups/`` + ``projects//locations//agents//flows//transitionRouteGroups/`` or - ``projects//locations//agents//transitionRouteGroups/``. + ``projects//locations//agents//transitionRouteGroups/``. This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -1099,9 +1065,9 @@ def sample_create_transition_route_group(): Required. The flow to create an [TransitionRouteGroup][google.cloud.dialogflow.cx.v3.TransitionRouteGroup] for. Format: - ``projects//locations//agents//flows/`` + ``projects//locations//agents//flows/`` or - ``projects//locations//agents/`` + ``projects//locations//agents/`` for agent-level groups. This corresponds to the ``parent`` field @@ -1363,9 +1329,9 @@ def sample_delete_transition_route_group(): Required. The name of the [TransitionRouteGroup][google.cloud.dialogflow.cx.v3.TransitionRouteGroup] to delete. 
Format: - ``projects//locations//agents//flows//transitionRouteGroups/`` + ``projects//locations//agents//flows//transitionRouteGroups/`` or - ``projects//locations//agents//transitionRouteGroups/``. + ``projects//locations//agents//transitionRouteGroups/``. This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/versions/async_client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/versions/async_client.py index e511f6ec9fc7..5213ce5b4703 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/versions/async_client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/versions/async_client.py @@ -305,7 +305,7 @@ async def sample_list_versions(): parent (:class:`str`): Required. The [Flow][google.cloud.dialogflow.cx.v3.Flow] to list all versions for. Format: - ``projects//locations//agents//flows/``. + ``projects//locations//agents//flows/``. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -428,7 +428,7 @@ async def sample_get_version(): Required. The name of the [Version][google.cloud.dialogflow.cx.v3.Version]. Format: - ``projects//locations//agents//flows//versions/``. + ``projects//locations//agents//flows//versions/``. This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -555,7 +555,7 @@ async def sample_create_version(): to create an [Version][google.cloud.dialogflow.cx.v3.Version] for. Format: - ``projects//locations//agents//flows/``. + ``projects//locations//agents//flows/``. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -798,7 +798,7 @@ async def sample_delete_version(): Required. The name of the [Version][google.cloud.dialogflow.cx.v3.Version] to delete. 
Format: - ``projects//locations//agents//flows//versions/``. + ``projects//locations//agents//flows//versions/``. This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -911,7 +911,7 @@ async def sample_load_version(): Required. The [Version][google.cloud.dialogflow.cx.v3.Version] to be loaded to draft flow. Format: - ``projects//locations//agents//flows//versions/``. + ``projects//locations//agents//flows//versions/``. This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -1041,7 +1041,7 @@ async def sample_compare_versions(): draft version of the specified flow. Format: - ``projects//locations//agents/ /flows//versions/``. + ``projects//locations//agents//flows//versions/``. This corresponds to the ``base_version`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/versions/client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/versions/client.py index 81324bc63c07..65622f5a8024 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/versions/client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/versions/client.py @@ -476,36 +476,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. 
- - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = VersionsClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -515,13 +485,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or VersionsClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): @@ -735,7 +701,7 @@ def sample_list_versions(): parent (str): Required. The [Flow][google.cloud.dialogflow.cx.v3.Flow] to list all versions for. Format: - ``projects//locations//agents//flows/``. + ``projects//locations//agents//flows/``. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -855,7 +821,7 @@ def sample_get_version(): Required. The name of the [Version][google.cloud.dialogflow.cx.v3.Version]. Format: - ``projects//locations//agents//flows//versions/``. + ``projects//locations//agents//flows//versions/``. This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -979,7 +945,7 @@ def sample_create_version(): to create an [Version][google.cloud.dialogflow.cx.v3.Version] for. 
Format: - ``projects//locations//agents//flows/``. + ``projects//locations//agents//flows/``. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -1216,7 +1182,7 @@ def sample_delete_version(): Required. The name of the [Version][google.cloud.dialogflow.cx.v3.Version] to delete. Format: - ``projects//locations//agents//flows//versions/``. + ``projects//locations//agents//flows//versions/``. This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -1326,7 +1292,7 @@ def sample_load_version(): Required. The [Version][google.cloud.dialogflow.cx.v3.Version] to be loaded to draft flow. Format: - ``projects//locations//agents//flows//versions/``. + ``projects//locations//agents//flows//versions/``. This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -1453,7 +1419,7 @@ def sample_compare_versions(): draft version of the specified flow. Format: - ``projects//locations//agents/ /flows//versions/``. + ``projects//locations//agents//flows//versions/``. This corresponds to the ``base_version`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/webhooks/async_client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/webhooks/async_client.py index 0be12b5d5354..60c43a1432d0 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/webhooks/async_client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/webhooks/async_client.py @@ -301,7 +301,7 @@ async def sample_list_webhooks(): [Webhooks.ListWebhooks][google.cloud.dialogflow.cx.v3.Webhooks.ListWebhooks]. parent (:class:`str`): Required. The agent to list all webhooks for. Format: - ``projects//locations//agents/``. + ``projects//locations//agents/``. 
This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -421,7 +421,7 @@ async def sample_get_webhook(): [Webhooks.GetWebhook][google.cloud.dialogflow.cx.v3.Webhooks.GetWebhook]. name (:class:`str`): Required. The name of the webhook. Format: - ``projects//locations//agents//webhooks/``. + ``projects//locations//agents//webhooks/``. This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -538,7 +538,7 @@ async def sample_create_webhook(): [Webhooks.CreateWebhook][google.cloud.dialogflow.cx.v3.Webhooks.CreateWebhook]. parent (:class:`str`): Required. The agent to create a webhook for. Format: - ``projects//locations//agents/``. + ``projects//locations//agents/``. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -779,7 +779,7 @@ async def sample_delete_webhook(): [Webhooks.DeleteWebhook][google.cloud.dialogflow.cx.v3.Webhooks.DeleteWebhook]. name (:class:`str`): Required. The name of the webhook to delete. Format: - ``projects//locations//agents//webhooks/``. + ``projects//locations//agents//webhooks/``. 
This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/webhooks/client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/webhooks/client.py index 3a8393555818..6a3830f789bb 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/webhooks/client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/webhooks/client.py @@ -493,36 +493,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = WebhooksClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -532,13 +502,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. 
""" - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or WebhooksClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): @@ -751,7 +717,7 @@ def sample_list_webhooks(): [Webhooks.ListWebhooks][google.cloud.dialogflow.cx.v3.Webhooks.ListWebhooks]. parent (str): Required. The agent to list all webhooks for. Format: - ``projects//locations//agents/``. + ``projects//locations//agents/``. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -868,7 +834,7 @@ def sample_get_webhook(): [Webhooks.GetWebhook][google.cloud.dialogflow.cx.v3.Webhooks.GetWebhook]. name (str): Required. The name of the webhook. Format: - ``projects//locations//agents//webhooks/``. + ``projects//locations//agents//webhooks/``. This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -982,7 +948,7 @@ def sample_create_webhook(): [Webhooks.CreateWebhook][google.cloud.dialogflow.cx.v3.Webhooks.CreateWebhook]. parent (str): Required. The agent to create a webhook for. Format: - ``projects//locations//agents/``. + ``projects//locations//agents/``. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -1217,7 +1183,7 @@ def sample_delete_webhook(): [Webhooks.DeleteWebhook][google.cloud.dialogflow.cx.v3.Webhooks.DeleteWebhook]. name (str): Required. The name of the webhook to delete. Format: - ``projects//locations//agents//webhooks/``. + ``projects//locations//agents//webhooks/``. 
This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/advanced_settings.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/advanced_settings.py index 83ae083d693f..6275811e66fe 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/advanced_settings.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/advanced_settings.py @@ -171,7 +171,7 @@ class LoggingSettings(proto.Message): Attributes: enable_stackdriver_logging (bool): - Enables StackDriver logging. + Enables Google Cloud Logging. enable_interaction_logging (bool): Enables DF Interaction logging. enable_consent_based_redaction (bool): diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/agent.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/agent.py index f296cbe1e9c5..377328b36af8 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/agent.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/agent.py @@ -92,7 +92,7 @@ class Agent(proto.Message): method. [Agents.CreateAgent][google.cloud.dialogflow.cx.v3.Agents.CreateAgent] populates the name automatically. Format: - ``projects//locations//agents/``. + ``projects//locations//agents/``. display_name (str): Required. The human-readable name of the agent, unique within the location. @@ -127,12 +127,12 @@ class Agent(proto.Message): flow will be automatically created when the agent is created, and can only be deleted by deleting the agent. Format: - ``projects//locations//agents//flows/``. + ``projects//locations//agents//flows/``. security_settings (str): Name of the [SecuritySettings][google.cloud.dialogflow.cx.v3.SecuritySettings] reference for the agent. Format: - ``projects//locations//securitySettings/``. 
+ ``projects//locations//securitySettings/``. enable_stackdriver_logging (bool): Indicates if stackdriver logging is enabled for the agent. Please use @@ -171,6 +171,9 @@ class Agent(proto.Message): personalization_settings (google.cloud.dialogflowcx_v3.types.Agent.PersonalizationSettings): Optional. Settings for end user personalization. + client_certificate_settings (google.cloud.dialogflowcx_v3.types.Agent.ClientCertificateSettings): + Optional. Settings for custom client + certificates. """ class GitIntegrationSettings(proto.Message): @@ -287,6 +290,39 @@ class PersonalizationSettings(proto.Message): message=struct_pb2.Struct, ) + class ClientCertificateSettings(proto.Message): + r"""Settings for custom client certificates. + + Attributes: + ssl_certificate (str): + Required. The ssl certificate encoded in PEM + format. This string must include the begin + header and end footer lines. + private_key (str): + Required. The name of the SecretManager secret version + resource storing the private key encoded in PEM format. + Format: + ``projects/{project}/secrets/{secret}/versions/{version}`` + passphrase (str): + Optional. The name of the SecretManager secret version + resource storing the passphrase. 'passphrase' should be left + unset if the private key is not encrypted. 
Format: + ``projects/{project}/secrets/{secret}/versions/{version}`` + """ + + ssl_certificate: str = proto.Field( + proto.STRING, + number=1, + ) + private_key: str = proto.Field( + proto.STRING, + number=2, + ) + passphrase: str = proto.Field( + proto.STRING, + number=3, + ) + name: str = proto.Field( proto.STRING, number=1, @@ -375,6 +411,11 @@ class PersonalizationSettings(proto.Message): number=42, message=PersonalizationSettings, ) + client_certificate_settings: ClientCertificateSettings = proto.Field( + proto.MESSAGE, + number=43, + message=ClientCertificateSettings, + ) class ListAgentsRequest(proto.Message): @@ -384,7 +425,7 @@ class ListAgentsRequest(proto.Message): Attributes: parent (str): Required. The location to list all agents for. Format: - ``projects//locations/``. + ``projects//locations/``. page_size (int): The maximum number of items to return in a single page. By default 100 and at most 1000. @@ -443,7 +484,7 @@ class GetAgentRequest(proto.Message): Attributes: name (str): Required. The name of the agent. Format: - ``projects//locations//agents/``. + ``projects//locations//agents/``. """ name: str = proto.Field( @@ -459,7 +500,7 @@ class CreateAgentRequest(proto.Message): Attributes: parent (str): Required. The location to create a agent for. Format: - ``projects//locations/``. + ``projects//locations/``. agent (google.cloud.dialogflowcx_v3.types.Agent): Required. The agent to create. """ @@ -507,7 +548,7 @@ class DeleteAgentRequest(proto.Message): Attributes: name (str): Required. The name of the agent to delete. Format: - ``projects//locations//agents/``. + ``projects//locations//agents/``. """ name: str = proto.Field( @@ -523,7 +564,7 @@ class ExportAgentRequest(proto.Message): Attributes: name (str): Required. The name of the agent to export. Format: - ``projects//locations//agents/``. + ``projects//locations//agents/``. agent_uri (str): Optional. 
The `Google Cloud Storage `__ URI to @@ -542,7 +583,7 @@ class ExportAgentRequest(proto.Message): environment (str): Optional. Environment name. If not set, draft environment is assumed. Format: - ``projects//locations//agents//environments/``. + ``projects//locations//agents//environments/``. git_destination (google.cloud.dialogflowcx_v3.types.ExportAgentRequest.GitDestination): Optional. The Git branch to export the agent to. @@ -678,7 +719,7 @@ class RestoreAgentRequest(proto.Message): Attributes: name (str): Required. The name of the agent to restore into. Format: - ``projects//locations//agents/``. + ``projects//locations//agents/``. agent_uri (str): The `Google Cloud Storage `__ URI to @@ -770,7 +811,7 @@ class ValidateAgentRequest(proto.Message): Attributes: name (str): Required. The agent to validate. Format: - ``projects//locations//agents/``. + ``projects//locations//agents/``. language_code (str): If not specified, the agent's default language is used. @@ -793,7 +834,7 @@ class GetAgentValidationResultRequest(proto.Message): Attributes: name (str): Required. The agent name. Format: - ``projects//locations//agents//validationResult``. + ``projects//locations//agents//validationResult``. language_code (str): If not specified, the agent's default language is used. @@ -817,7 +858,7 @@ class AgentValidationResult(proto.Message): name (str): The unique identifier of the agent validation result. Format: - ``projects//locations//agents//validationResult``. + ``projects//locations//agents//validationResult``. flow_validation_results (MutableSequence[google.cloud.dialogflowcx_v3.types.FlowValidationResult]): Contains all flow validation results. """ @@ -843,7 +884,7 @@ class GetGenerativeSettingsRequest(proto.Message): Attributes: name (str): Required. Format: - ``projects//locations//agents//generativeSettings``. + ``projects//locations//agents//generativeSettings``. language_code (str): Required. Language code of the generative settings. 
diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/audio_config.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/audio_config.py index 48fc2f8fee4b..355ace1f322a 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/audio_config.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/audio_config.py @@ -87,6 +87,9 @@ class AudioEncoding(proto.Enum): 5574. In other words, each RTP header is replaced with a single byte containing the block length. Only Speex wideband is supported. ``sample_rate_hertz`` must be 16000. + AUDIO_ENCODING_ALAW (8): + 8-bit samples that compand 13-bit audio + samples using G.711 PCMU/a-law. """ AUDIO_ENCODING_UNSPECIFIED = 0 AUDIO_ENCODING_LINEAR_16 = 1 @@ -96,6 +99,7 @@ class AudioEncoding(proto.Enum): AUDIO_ENCODING_AMR_WB = 5 AUDIO_ENCODING_OGG_OPUS = 6 AUDIO_ENCODING_SPEEX_WITH_HEADER_BYTE = 7 + AUDIO_ENCODING_ALAW = 8 class SpeechModelVariant(proto.Enum): @@ -186,6 +190,9 @@ class OutputAudioEncoding(proto.Enum): OUTPUT_AUDIO_ENCODING_MULAW (5): 8-bit samples that compand 14-bit audio samples using G.711 PCMU/mu-law. + OUTPUT_AUDIO_ENCODING_ALAW (6): + 8-bit samples that compand 13-bit audio + samples using G.711 PCMU/a-law. 
""" OUTPUT_AUDIO_ENCODING_UNSPECIFIED = 0 OUTPUT_AUDIO_ENCODING_LINEAR_16 = 1 @@ -193,6 +200,7 @@ class OutputAudioEncoding(proto.Enum): OUTPUT_AUDIO_ENCODING_MP3_64_KBPS = 4 OUTPUT_AUDIO_ENCODING_OGG_OPUS = 3 OUTPUT_AUDIO_ENCODING_MULAW = 5 + OUTPUT_AUDIO_ENCODING_ALAW = 6 class SpeechWordInfo(proto.Message): diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/changelog.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/changelog.py index a858a236569e..182d0b843d58 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/changelog.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/changelog.py @@ -38,7 +38,7 @@ class ListChangelogsRequest(proto.Message): Attributes: parent (str): Required. The agent containing the changelogs. Format: - ``projects//locations//agents/``. + ``projects//locations//agents/``. filter (str): The filter string. Supports filter by user_email, resource, type and create_time. Some examples: @@ -120,7 +120,7 @@ class GetChangelogRequest(proto.Message): Attributes: name (str): Required. The name of the changelog to get. Format: - ``projects//locations//agents//changelogs/``. + ``projects//locations//agents//changelogs/``. """ name: str = proto.Field( @@ -135,7 +135,7 @@ class Changelog(proto.Message): Attributes: name (str): The unique identifier of the changelog. Format: - ``projects//locations//agents//changelogs/``. + ``projects//locations//agents//changelogs/``. user_email (str): Email address of the authenticated user. 
display_name (str): diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/deployment.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/deployment.py index 154a24bfde39..88422ead299c 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/deployment.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/deployment.py @@ -41,15 +41,14 @@ class Deployment(proto.Message): Attributes: name (str): The name of the deployment. - Format: projects//locations//agents//environments//deployments/. + Format: + + projects//locations//agents//environments//deployments/. flow_version (str): The name of the flow version for this - deployment. Format: projects//locations//agents//flows//versions/. + deployment. Format: + + projects//locations//agents//flows//versions/. state (google.cloud.dialogflowcx_v3.types.Deployment.State): The current state of the deployment. result (google.cloud.dialogflowcx_v3.types.Deployment.Result): @@ -84,13 +83,12 @@ class Result(proto.Message): Attributes: deployment_test_results (MutableSequence[str]): Results of test cases running before the deployment. Format: - ``projects//locations//agents//testCases//results/``. + ``projects//locations//agents//testCases//results/``. experiment (str): The name of the experiment triggered by this - deployment. Format: projects//locations//agents//environments//experiments/. + deployment. Format: + + projects//locations//agents//environments//experiments/. """ deployment_test_results: MutableSequence[str] = proto.RepeatedField( @@ -141,7 +139,7 @@ class ListDeploymentsRequest(proto.Message): Required. The [Environment][google.cloud.dialogflow.cx.v3.Environment] to list all environments for. Format: - ``projects//locations//agents//environments/``. + ``projects//locations//agents//environments/``. page_size (int): The maximum number of items to return in a single page. By default 20 and at most 100. 
@@ -204,7 +202,7 @@ class GetDeploymentRequest(proto.Message): Required. The name of the [Deployment][google.cloud.dialogflow.cx.v3.Deployment]. Format: - ``projects//locations//agents//environments//deployments/``. + ``projects//locations//agents//environments//deployments/``. """ name: str = proto.Field( diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/entity_type.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/entity_type.py index a81aa7aa8460..7403fe2cfe57 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/entity_type.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/entity_type.py @@ -80,7 +80,7 @@ class EntityType(proto.Message): The unique identifier of the entity type. Required for [EntityTypes.UpdateEntityType][google.cloud.dialogflow.cx.v3.EntityTypes.UpdateEntityType]. Format: - ``projects//locations//agents//entityTypes/``. + ``projects//locations//agents//entityTypes/``. display_name (str): Required. The human-readable name of the entity type, unique within the agent. @@ -253,10 +253,10 @@ class ExportEntityTypesRequest(proto.Message): parent (str): Required. The name of the parent agent to export entity types. Format: - ``projects//locations//agents/``. + ``projects//locations//agents/``. entity_types (MutableSequence[str]): Required. The name of the entity types to export. Format: - ``projects//locations//agents//entityTypes/``. + ``projects//locations//agents//entityTypes/``. entity_types_uri (str): Optional. The `Google Cloud Storage `__ URI to @@ -401,7 +401,7 @@ class ImportEntityTypesRequest(proto.Message): Attributes: parent (str): Required. The agent to import the entity types into. Format: - ``projects//locations//agents/``. + ``projects//locations//agents/``. entity_types_uri (str): The `Google Cloud Storage `__ URI to @@ -424,7 +424,7 @@ class ImportEntityTypesRequest(proto.Message): types. 
target_entity_type (str): Optional. The target entity type to import into. Format: - ``projects//locations//agents//entity_types/``. + ``projects//locations//agents//entity_types/``. If set, there should be only one entity type included in [entity_types][google.cloud.dialogflow.cx.v3.ImportEntityTypesRequest.entity_types], of which the type should match the type of the target entity @@ -501,7 +501,7 @@ class ImportEntityTypesResponse(proto.Message): Attributes: entity_types (MutableSequence[str]): The unique identifier of the imported entity types. Format: - ``projects//locations//agents//entity_types/``. + ``projects//locations//agents//entity_types/``. conflicting_resources (google.cloud.dialogflowcx_v3.types.ImportEntityTypesResponse.ConflictingResources): Info which resources have conflicts when [REPORT_CONFLICT][ImportEntityTypesResponse.REPORT_CONFLICT] @@ -556,7 +556,7 @@ class ListEntityTypesRequest(proto.Message): Attributes: parent (str): Required. The agent to list all entity types for. Format: - ``projects//locations//agents/``. + ``projects//locations//agents/``. language_code (str): The language to list entity types for. The following fields are language dependent: @@ -632,7 +632,7 @@ class GetEntityTypeRequest(proto.Message): Attributes: name (str): Required. The name of the entity type. Format: - ``projects//locations//agents//entityTypes/``. + ``projects//locations//agents//entityTypes/``. language_code (str): The language to retrieve the entity type for. The following fields are language dependent: @@ -665,7 +665,7 @@ class CreateEntityTypeRequest(proto.Message): Attributes: parent (str): Required. The agent to create a entity type for. Format: - ``projects//locations//agents/``. + ``projects//locations//agents/``. entity_type (google.cloud.dialogflowcx_v3.types.EntityType): Required. The entity type to create. language_code (str): @@ -743,7 +743,7 @@ class DeleteEntityTypeRequest(proto.Message): Attributes: name (str): Required. 
The name of the entity type to delete. Format: - ``projects//locations//agents//entityTypes/``. + ``projects//locations//agents//entityTypes/``. force (bool): This field has no effect for entity type not being used. For entity types that are used by intents or pages: diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/environment.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/environment.py index 95c32c081513..0f83edb01b94 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/environment.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/environment.py @@ -62,7 +62,7 @@ class Environment(proto.Message): Attributes: name (str): The name of the environment. Format: - ``projects//locations//agents//environments/``. + ``projects//locations//agents//environments/``. display_name (str): Required. The human-readable name of the environment (unique in an agent). Limit of 64 @@ -92,9 +92,13 @@ class VersionConfig(proto.Message): Attributes: version (str): - Required. Format: projects//locations//agents//flows//versions/. + Required. Both flow and playbook versions are + supported. Format for flow version: + + projects//locations//agents//flows//versions/. + Format for playbook version: + + projects//locations//agents//playbooks//versions/. """ version: str = proto.Field( @@ -109,7 +113,7 @@ class TestCasesConfig(proto.Message): test_cases (MutableSequence[str]): A list of test case names to run. They should be under the same agent. Format of each test case name: - ``projects//locations/ /agents//testCases/`` + ``projects//locations//agents//testCases/`` enable_continuous_run (bool): Whether to run test cases in [TestCasesConfig.test_cases][google.cloud.dialogflow.cx.v3.Environment.TestCasesConfig.test_cases] @@ -195,7 +199,7 @@ class ListEnvironmentsRequest(proto.Message): parent (str): Required. 
The [Agent][google.cloud.dialogflow.cx.v3.Agent] to list all environments for. Format: - ``projects//locations//agents/``. + ``projects//locations//agents/``. page_size (int): The maximum number of items to return in a single page. By default 20 and at most 100. @@ -258,7 +262,7 @@ class GetEnvironmentRequest(proto.Message): Required. The name of the [Environment][google.cloud.dialogflow.cx.v3.Environment]. Format: - ``projects//locations//agents//environments/``. + ``projects//locations//agents//environments/``. """ name: str = proto.Field( @@ -277,7 +281,7 @@ class CreateEnvironmentRequest(proto.Message): to create an [Environment][google.cloud.dialogflow.cx.v3.Environment] for. Format: - ``projects//locations//agents/``. + ``projects//locations//agents/``. environment (google.cloud.dialogflowcx_v3.types.Environment): Required. The environment to create. """ @@ -326,7 +330,7 @@ class DeleteEnvironmentRequest(proto.Message): Required. The name of the [Environment][google.cloud.dialogflow.cx.v3.Environment] to delete. Format: - ``projects//locations//agents//environments/``. + ``projects//locations//agents//environments/``. """ name: str = proto.Field( @@ -343,7 +347,7 @@ class LookupEnvironmentHistoryRequest(proto.Message): name (str): Required. Resource name of the environment to look up the history for. Format: - ``projects//locations//agents//environments/``. + ``projects//locations//agents//environments/``. page_size (int): The maximum number of items to return in a single page. By default 100 and at most 1000. @@ -403,7 +407,7 @@ class ContinuousTestResult(proto.Message): Attributes: name (str): The resource name for the continuous test result. Format: - ``projects//locations//agents//environments//continuousTestResults/``. + ``projects//locations//agents//environments//continuousTestResults/``. result (google.cloud.dialogflowcx_v3.types.ContinuousTestResult.AggregatedTestResult): The result of this continuous test run, i.e. 
whether all the tests in this continuous test @@ -458,7 +462,7 @@ class RunContinuousTestRequest(proto.Message): Attributes: environment (str): Required. Format: - ``projects//locations//agents//environments/``. + ``projects//locations//agents//environments/``. """ environment: str = proto.Field( @@ -507,7 +511,7 @@ class ListContinuousTestResultsRequest(proto.Message): Attributes: parent (str): Required. The environment to list results for. Format: - ``projects//locations//agents// environments/``. + ``projects//locations//agents//environments/``. page_size (int): The maximum number of items to return in a single page. By default 100 and at most 1000. @@ -566,10 +570,10 @@ class DeployFlowRequest(proto.Message): Attributes: environment (str): Required. The environment to deploy the flow to. Format: - ``projects//locations//agents// environments/``. + ``projects//locations//agents//environments/``. flow_version (str): Required. The flow version to deploy. Format: - ``projects//locations//agents// flows//versions/``. + ``projects//locations//agents//flows//versions/``. """ environment: str = proto.Field( @@ -594,7 +598,7 @@ class DeployFlowResponse(proto.Message): The name of the flow version [Deployment][google.cloud.dialogflow.cx.v3.Deployment]. Format: - ``projects//locations//agents// environments//deployments/``. + ``projects//locations//agents//environments//deployments/``. """ environment: "Environment" = proto.Field( diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/experiment.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/experiment.py index 987140cbf341..4cd2e349f989 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/experiment.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/experiment.py @@ -48,10 +48,9 @@ class Experiment(proto.Message): Attributes: name (str): The name of the experiment. 
- Format: projects//locations//agents//environments//experiments/.. + Format: + + projects//locations//agents//environments//experiments/. display_name (str): Required. The human-readable name of the experiment (unique in an environment). Limit of @@ -313,7 +312,7 @@ class VersionMetrics(proto.Message): version (str): The name of the flow [Version][google.cloud.dialogflow.cx.v3.Version]. Format: - ``projects//locations//agents//flows//versions/``. + ``projects//locations//agents//flows//versions/``. metrics (MutableSequence[google.cloud.dialogflowcx_v3.types.Experiment.Result.Metric]): The metrics and corresponding confidence intervals in the inference result. @@ -436,7 +435,7 @@ class Variant(proto.Message): Attributes: version (str): The name of the flow version. Format: - ``projects//locations//agents//flows//versions/``. + ``projects//locations//agents//flows//versions/``. traffic_allocation (float): Percentage of the traffic which should be routed to this version of flow. Traffic @@ -598,7 +597,7 @@ class ListExperimentsRequest(proto.Message): Required. The [Environment][google.cloud.dialogflow.cx.v3.Environment] to list all environments for. Format: - ``projects//locations//agents//environments/``. + ``projects//locations//agents//environments/``. page_size (int): The maximum number of items to return in a single page. By default 20 and at most 100. @@ -661,7 +660,7 @@ class GetExperimentRequest(proto.Message): Required. The name of the [Environment][google.cloud.dialogflow.cx.v3.Environment]. Format: - ``projects//locations//agents//environments//experiments/``. + ``projects//locations//agents//environments//experiments/``. """ name: str = proto.Field( @@ -680,7 +679,7 @@ class CreateExperimentRequest(proto.Message): to create an [Environment][google.cloud.dialogflow.cx.v3.Environment] for. Format: - ``projects//locations//agents//environments/``. + ``projects//locations//agents//environments/``. 
experiment (google.cloud.dialogflowcx_v3.types.Experiment): Required. The experiment to create. """ @@ -729,7 +728,7 @@ class DeleteExperimentRequest(proto.Message): Required. The name of the [Environment][google.cloud.dialogflow.cx.v3.Environment] to delete. Format: - ``projects//locations//agents//environments//experiments/``. + ``projects//locations//agents//environments//experiments/``. """ name: str = proto.Field( @@ -745,7 +744,7 @@ class StartExperimentRequest(proto.Message): Attributes: name (str): Required. Resource name of the experiment to start. Format: - ``projects//locations//agents//environments//experiments/``. + ``projects//locations//agents//environments//experiments/``. """ name: str = proto.Field( @@ -761,7 +760,7 @@ class StopExperimentRequest(proto.Message): Attributes: name (str): Required. Resource name of the experiment to stop. Format: - ``projects//locations//agents//environments//experiments/``. + ``projects//locations//agents//environments//experiments/``. """ name: str = proto.Field( diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/flow.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/flow.py index 6a4f64ff33b6..f51cca109145 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/flow.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/flow.py @@ -65,7 +65,9 @@ class NluSettings(proto.Message): value, then a no-match event will be triggered. The score values range from 0.0 (completely uncertain) to 1.0 (completely certain). If set - to 0.0, the default of 0.3 is used. + to 0.0, the default of 0.3 is used. You can set + a separate classification threshold for the flow + in each language enabled for the agent. model_training_mode (google.cloud.dialogflowcx_v3.types.NluSettings.ModelTrainingMode): Indicates NLU model training mode. 
""" @@ -142,7 +144,7 @@ class Flow(proto.Message): Attributes: name (str): The unique identifier of the flow. Format: - ``projects//locations//agents//flows/``. + ``projects//locations//agents//flows/``. display_name (str): Required. The human-readable name of the flow. @@ -195,9 +197,10 @@ class Flow(proto.Message): groups defined in the page have higher priority than those defined in the flow. - Format:\ ``projects//locations//agents//flows//transitionRouteGroups/`` + Format: + ``projects//locations//agents//flows//transitionRouteGroups/`` or - ``projects//locations//agents//transitionRouteGroups/`` + ``projects//locations//agents//transitionRouteGroups/`` for agent-level groups. nlu_settings (google.cloud.dialogflowcx_v3.types.NluSettings): NLU related settings of the flow. @@ -305,7 +308,7 @@ class CreateFlowRequest(proto.Message): Attributes: parent (str): Required. The agent to create a flow for. Format: - ``projects//locations//agents/``. + ``projects//locations//agents/``. flow (google.cloud.dialogflowcx_v3.types.Flow): Required. The flow to create. language_code (str): @@ -345,7 +348,7 @@ class DeleteFlowRequest(proto.Message): Attributes: name (str): Required. The name of the flow to delete. Format: - ``projects//locations//agents//flows/``. + ``projects//locations//agents//flows/``. force (bool): This field has no effect for flows with no incoming transitions. For flows with incoming transitions: @@ -377,7 +380,7 @@ class ListFlowsRequest(proto.Message): Attributes: parent (str): Required. The agent containing the flows. Format: - ``projects//locations//agents/``. + ``projects//locations//agents/``. page_size (int): The maximum number of items to return in a single page. By default 100 and at most 1000. @@ -454,7 +457,7 @@ class GetFlowRequest(proto.Message): Attributes: name (str): Required. The name of the flow to get. Format: - ``projects//locations//agents//flows/``. + ``projects//locations//agents//flows/``. 
language_code (str): The language to retrieve the flow for. The following fields are language dependent: @@ -530,7 +533,7 @@ class TrainFlowRequest(proto.Message): Attributes: name (str): Required. The flow to train. Format: - ``projects//locations//agents//flows/``. + ``projects//locations//agents//flows/``. """ name: str = proto.Field( @@ -546,7 +549,7 @@ class ValidateFlowRequest(proto.Message): Attributes: name (str): Required. The flow to validate. Format: - ``projects//locations//agents//flows/``. + ``projects//locations//agents//flows/``. language_code (str): If not specified, the agent's default language is used. @@ -569,7 +572,7 @@ class GetFlowValidationResultRequest(proto.Message): Attributes: name (str): Required. The flow name. Format: - ``projects//locations//agents//flows//validationResult``. + ``projects//locations//agents//flows//validationResult``. language_code (str): If not specified, the agent's default language is used. @@ -592,7 +595,7 @@ class FlowValidationResult(proto.Message): Attributes: name (str): The unique identifier of the flow validation result. Format: - ``projects//locations//agents//flows//validationResult``. + ``projects//locations//agents//flows//validationResult``. validation_messages (MutableSequence[google.cloud.dialogflowcx_v3.types.ValidationMessage]): Contains all validation messages. update_time (google.protobuf.timestamp_pb2.Timestamp): @@ -631,7 +634,7 @@ class ImportFlowRequest(proto.Message): Attributes: parent (str): Required. The agent to import the flow into. Format: - ``projects//locations//agents/``. + ``projects//locations//agents/``. flow_uri (str): The `Google Cloud Storage `__ URI to @@ -730,7 +733,7 @@ class ImportFlowResponse(proto.Message): Attributes: flow (str): The unique identifier of the new flow. Format: - ``projects//locations//agents//flows/``. + ``projects//locations//agents//flows/``. 
""" flow: str = proto.Field( @@ -746,7 +749,7 @@ class ExportFlowRequest(proto.Message): Attributes: name (str): Required. The name of the flow to export. Format: - ``projects//locations//agents//flows/``. + ``projects//locations//agents//flows/``. flow_uri (str): Optional. The `Google Cloud Storage `__ URI to diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/fulfillment.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/fulfillment.py index 4b3d8d69a441..de8225df6484 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/fulfillment.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/fulfillment.py @@ -60,7 +60,7 @@ class Fulfillment(proto.Message): to the user. webhook (str): The webhook to call. Format: - ``projects//locations//agents//webhooks/``. + ``projects//locations//agents//webhooks/``. return_partial_responses (bool): Whether Dialogflow should return currently queued fulfillment response messages in diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/generative_settings.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/generative_settings.py index e733391bf25b..76232ba2e72c 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/generative_settings.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/generative_settings.py @@ -35,7 +35,7 @@ class GenerativeSettings(proto.Message): Attributes: name (str): Format: - ``projects//locations//agents//generativeSettings``. + ``projects//locations//agents//generativeSettings``. fallback_settings (google.cloud.dialogflowcx_v3.types.GenerativeSettings.FallbackSettings): Settings for Generative Fallback. 
generative_safety_settings (google.cloud.dialogflowcx_v3.types.SafetySettings): diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/generator.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/generator.py index 08e7d22045f2..d5150034692b 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/generator.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/generator.py @@ -47,7 +47,7 @@ class Generator(proto.Message): [Generators.UpdateGenerator][google.cloud.dialogflow.cx.v3.Generators.UpdateGenerator] method. [Generators.CreateGenerate][] populates the name automatically. Format: - ``projects//locations//agents//generators/``. + ``projects//locations//agents//generators/``. display_name (str): Required. The human-readable name of the generator, unique within the agent. The prompt @@ -61,6 +61,9 @@ class Generator(proto.Message): placeholders (MutableSequence[google.cloud.dialogflowcx_v3.types.Generator.Placeholder]): Optional. List of custom placeholders in the prompt text. + model_parameter (google.cloud.dialogflowcx_v3.types.Generator.ModelParameter): + Parameters passed to the LLM to configure its + behavior. """ class Placeholder(proto.Message): @@ -83,6 +86,63 @@ class Placeholder(proto.Message): number=2, ) + class ModelParameter(proto.Message): + r"""Parameters to be passed to the LLM. If not set, default + values will be used. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + temperature (float): + The temperature used for sampling. Temperature sampling + occurs after both topP and topK have been applied. Valid + range: [0.0, 1.0] Low temperature = less random. High + temperature = more random. + + This field is a member of `oneof`_ ``_temperature``. + max_decode_steps (int): + The maximum number of tokens to generate. 
+ + This field is a member of `oneof`_ ``_max_decode_steps``. + top_p (float): + If set, only the tokens comprising the top top_p probability + mass are considered. If both top_p and top_k are set, top_p + will be used for further refining candidates selected with + top_k. Valid range: (0.0, 1.0]. Small topP = less random. + Large topP = more random. + + This field is a member of `oneof`_ ``_top_p``. + top_k (int): + If set, the sampling process in each step is limited to the + top_k tokens with highest probabilities. Valid range: [1, + 40] or 1000+. Small topK = less random. Large topK = more + random. + + This field is a member of `oneof`_ ``_top_k``. + """ + + temperature: float = proto.Field( + proto.FLOAT, + number=1, + optional=True, + ) + max_decode_steps: int = proto.Field( + proto.INT32, + number=2, + optional=True, + ) + top_p: float = proto.Field( + proto.FLOAT, + number=3, + optional=True, + ) + top_k: int = proto.Field( + proto.INT32, + number=4, + optional=True, + ) + name: str = proto.Field( proto.STRING, number=1, @@ -101,6 +161,11 @@ class Placeholder(proto.Message): number=5, message=Placeholder, ) + model_parameter: ModelParameter = proto.Field( + proto.MESSAGE, + number=8, + message=ModelParameter, + ) class Phrase(proto.Message): @@ -125,7 +190,7 @@ class ListGeneratorsRequest(proto.Message): Attributes: parent (str): Required. The agent to list all generators for. Format: - ``projects//locations//agents/``. + ``projects//locations//agents/``. language_code (str): The language to list generators for. page_size (int): @@ -190,7 +255,7 @@ class GetGeneratorRequest(proto.Message): Attributes: name (str): Required. The name of the generator. Format: - ``projects//locations//agents//generators/``. + ``projects//locations//agents//generators/``. language_code (str): The language to list generators for. """ @@ -212,7 +277,7 @@ class CreateGeneratorRequest(proto.Message): Attributes: parent (str): Required. The agent to create a generator for. 
Format: - ``projects//locations//agents/``. + ``projects//locations//agents/``. generator (google.cloud.dialogflowcx_v3.types.Generator): Required. The generator to create. language_code (str): @@ -275,7 +340,7 @@ class DeleteGeneratorRequest(proto.Message): Attributes: name (str): Required. The name of the generator to delete. Format: - ``projects//locations//agents//generators/``. + ``projects//locations//agents//generators/``. force (bool): This field has no effect for generators not being used. For generators that are used by pages/flows/transition route diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/intent.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/intent.py index 8ac632707e0d..b2e6f0b73167 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/intent.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/intent.py @@ -77,7 +77,7 @@ class Intent(proto.Message): method. [Intents.CreateIntent][google.cloud.dialogflow.cx.v3.Intents.CreateIntent] populates the name automatically. Format: - ``projects//locations//agents//intents/``. + ``projects//locations//agents//intents/``. display_name (str): Required. The human-readable name of the intent, unique within the agent. @@ -214,11 +214,11 @@ class Parameter(proto.Message): [parts][google.cloud.dialogflow.cx.v3.Intent.TrainingPhrase.Part]. entity_type (str): Required. The entity type of the parameter. Format: - ``projects/-/locations/-/agents/-/entityTypes/`` + ``projects/-/locations/-/agents/-/entityTypes/`` for system entity types (for example, ``projects/-/locations/-/agents/-/entityTypes/sys.date``), or - ``projects//locations//agents//entityTypes/`` + ``projects//locations//agents//entityTypes/`` for developer entity types. is_list (bool): Indicates whether the parameter represents a @@ -294,7 +294,7 @@ class ListIntentsRequest(proto.Message): Attributes: parent (str): Required. 
The agent to list all intents for. Format: - ``projects//locations//agents/``. + ``projects//locations//agents/``. language_code (str): The language to list intents for. The following fields are language dependent: @@ -376,7 +376,7 @@ class GetIntentRequest(proto.Message): Attributes: name (str): Required. The name of the intent. Format: - ``projects//locations//agents//intents/``. + ``projects//locations//agents//intents/``. language_code (str): The language to retrieve the intent for. The following fields are language dependent: @@ -407,7 +407,7 @@ class CreateIntentRequest(proto.Message): Attributes: parent (str): Required. The agent to create an intent for. Format: - ``projects//locations//agents/``. + ``projects//locations//agents/``. intent (google.cloud.dialogflowcx_v3.types.Intent): Required. The intent to create. language_code (str): @@ -483,7 +483,7 @@ class DeleteIntentRequest(proto.Message): Attributes: name (str): Required. The name of the intent to delete. Format: - ``projects//locations//agents//intents/``. + ``projects//locations//agents//intents/``. """ name: str = proto.Field( @@ -506,7 +506,7 @@ class ImportIntentsRequest(proto.Message): Attributes: parent (str): Required. The agent to import the intents into. Format: - ``projects//locations//agents/``. + ``projects//locations//agents/``. intents_uri (str): The `Google Cloud Storage `__ URI to @@ -597,7 +597,7 @@ class ImportIntentsResponse(proto.Message): Attributes: intents (MutableSequence[str]): The unique identifier of the imported intents. Format: - ``projects//locations//agents//intents/``. + ``projects//locations//agents//intents/``. conflicting_resources (google.cloud.dialogflowcx_v3.types.ImportIntentsResponse.ConflictingResources): Info which resources have conflicts when [REPORT_CONFLICT][ImportIntentsResponse.REPORT_CONFLICT] @@ -659,10 +659,10 @@ class ExportIntentsRequest(proto.Message): parent (str): Required. The name of the parent agent to export intents. 
Format: - ``projects//locations//agents/``. + ``projects//locations//agents/``. intents (MutableSequence[str]): Required. The name of the intents to export. Format: - ``projects//locations//agents//intents/``. + ``projects//locations//agents//intents/``. intents_uri (str): Optional. The `Google Cloud Storage `__ URI to diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/page.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/page.py index 9f53353936bc..19a8e64ee66e 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/page.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/page.py @@ -72,7 +72,7 @@ class Page(proto.Message): method. [Pages.CreatePage][google.cloud.dialogflow.cx.v3.Pages.CreatePage] populates the name automatically. Format: - ``projects//locations//agents//flows//pages/``. + ``projects//locations//agents//flows//pages/``. display_name (str): Required. The human-readable name of the page, unique within the flow. @@ -103,9 +103,9 @@ class Page(proto.Message): the same intent, then the first group in the ordered list takes precedence. - Format:\ ``projects//locations//agents//flows//transitionRouteGroups/`` + Format:\ ``projects//locations//agents//flows//transitionRouteGroups/`` or - ``projects//locations//agents//transitionRouteGroups/`` + ``projects//locations//agents//transitionRouteGroups/`` for agent-level groups. transition_routes (MutableSequence[google.cloud.dialogflowcx_v3.types.TransitionRoute]): A list of transitions for the transition rules of this page. @@ -219,11 +219,11 @@ class Parameter(proto.Message): form filling concludes. entity_type (str): Required. The entity type of the parameter. 
Format: - ``projects/-/locations/-/agents/-/entityTypes/`` + ``projects/-/locations/-/agents/-/entityTypes/`` for system entity types (for example, ``projects/-/locations/-/agents/-/entityTypes/sys.date``), or - ``projects//locations//agents//entityTypes/`` + ``projects//locations//agents//entityTypes/`` for developer entity types. is_list (bool): Indicates whether the parameter represents a @@ -394,12 +394,12 @@ class EventHandler(proto.Message): fulfillment for a handler handling webhooks. target_page (str): The target page to transition to. Format: - ``projects//locations//agents//flows//pages/``. + ``projects//locations//agents//flows//pages/``. This field is a member of `oneof`_ ``target``. target_flow (str): The target flow to transition to. Format: - ``projects//locations//agents//flows/``. + ``projects//locations//agents//flows/``. This field is a member of `oneof`_ ``target``. """ @@ -465,7 +465,7 @@ class TransitionRoute(proto.Message): intent (str): The unique identifier of an [Intent][google.cloud.dialogflow.cx.v3.Intent]. Format: - ``projects//locations//agents//intents/``. + ``projects//locations//agents//intents/``. Indicates that the transition can only happen when the given intent is matched. At least one of ``intent`` or ``condition`` must be specified. When both ``intent`` and @@ -490,12 +490,12 @@ class TransitionRoute(proto.Message): executed first. target_page (str): The target page to transition to. Format: - ``projects//locations//agents//flows//pages/``. + ``projects//locations//agents//flows//pages/``. This field is a member of `oneof`_ ``target``. target_flow (str): The target flow to transition to. Format: - ``projects//locations//agents//flows/``. + ``projects//locations//agents//flows/``. This field is a member of `oneof`_ ``target``. """ @@ -540,7 +540,7 @@ class ListPagesRequest(proto.Message): Attributes: parent (str): Required. The flow to list all pages for. Format: - ``projects//locations//agents//flows/``. 
+ ``projects//locations//agents//flows/``. language_code (str): The language to list pages for. The following fields are language dependent: @@ -629,7 +629,7 @@ class GetPageRequest(proto.Message): Attributes: name (str): Required. The name of the page. Format: - ``projects//locations//agents//flows//pages/``. + ``projects//locations//agents//flows//pages/``. language_code (str): The language to retrieve the page for. The following fields are language dependent: @@ -675,7 +675,7 @@ class CreatePageRequest(proto.Message): Attributes: parent (str): Required. The flow to create a page for. Format: - ``projects//locations//agents//flows/``. + ``projects//locations//agents//flows/``. page (google.cloud.dialogflowcx_v3.types.Page): Required. The page to create. language_code (str): @@ -781,7 +781,7 @@ class DeletePageRequest(proto.Message): Attributes: name (str): Required. The name of the page to delete. Format: - ``projects//locations//agents//Flows//pages/``. + ``projects//locations//agents//Flows//pages/``. force (bool): This field has no effect for pages with no incoming transitions. For pages with incoming transitions: @@ -833,12 +833,12 @@ class KnowledgeConnectorSettings(proto.Message): fulfillment. target_page (str): The target page to transition to. Format: - ``projects//locations//agents//flows//pages/``. + ``projects//locations//agents//flows//pages/``. This field is a member of `oneof`_ ``target``. target_flow (str): The target flow to transition to. Format: - ``projects//locations//agents//flows/``. + ``projects//locations//agents//flows/``. This field is a member of `oneof`_ ``target``. 
data_store_connections (MutableSequence[google.cloud.dialogflowcx_v3.types.DataStoreConnection]): diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/response_message.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/response_message.py index f7fad2e8e1ee..3a5203872c23 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/response_message.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/response_message.py @@ -160,7 +160,10 @@ class Text(proto.Message): Attributes: text (MutableSequence[str]): - Required. A collection of text responses. + Required. A collection of text response + variants. If multiple variants are defined, only + one text response variant is returned at + runtime. allow_playback_interruption (bool): Output only. Whether the playback of this message can be interrupted by the end user's diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/security_settings.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/security_settings.py index 087dbcb5b52b..7d1686c06b6f 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/security_settings.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/security_settings.py @@ -41,7 +41,7 @@ class GetSecuritySettingsRequest(proto.Message): Attributes: name (str): Required. Resource name of the settings. Format: - ``projects//locations//securitySettings/``. + ``projects//locations//securitySettings/``. """ name: str = proto.Field( @@ -82,7 +82,7 @@ class ListSecuritySettingsRequest(proto.Message): Attributes: parent (str): Required. The location to list all security settings for. - Format: ``projects//locations/``. + Format: ``projects//locations/``. page_size (int): The maximum number of items to return in a single page. By default 20 and at most 100. 
@@ -140,7 +140,7 @@ class CreateSecuritySettingsRequest(proto.Message): Required. The location to create an [SecuritySettings][google.cloud.dialogflow.cx.v3.SecuritySettings] for. Format: - ``projects//locations/``. + ``projects//locations/``. security_settings (google.cloud.dialogflowcx_v3.types.SecuritySettings): Required. The security settings to create. """ @@ -164,7 +164,7 @@ class DeleteSecuritySettingsRequest(proto.Message): Required. The name of the [SecuritySettings][google.cloud.dialogflow.cx.v3.SecuritySettings] to delete. Format: - ``projects//locations//securitySettings/``. + ``projects//locations//securitySettings/``. """ name: str = proto.Field( @@ -193,7 +193,7 @@ class SecuritySettings(proto.Message): method. [SecuritySettingsService.CreateSecuritySettings][google.cloud.dialogflow.cx.v3.SecuritySettingsService.CreateSecuritySettings] populates the name automatically. Format: - ``projects//locations//securitySettings/``. + ``projects//locations//securitySettings/``. display_name (str): Required. The human-readable name of the security settings, unique within the location. @@ -216,9 +216,9 @@ class SecuritySettings(proto.Message): If empty, we use the default DLP inspect config. The template name will have one of the following formats: - ``projects//locations//inspectTemplates/