diff --git a/tests/integration/goldens/asset/docs/conf.py b/tests/integration/goldens/asset/docs/conf.py
index 6c82dd6667..f78a785513 100755
--- a/tests/integration/goldens/asset/docs/conf.py
+++ b/tests/integration/goldens/asset/docs/conf.py
@@ -25,9 +25,9 @@
 # All configuration values have a default; values that are commented out
 # serve to show the default.
 
-import sys
 import os
 import shlex
+import sys
 
 # If extensions (or modules to document with autodoc) are in another directory,
 # add these directories to sys.path here. If the directory is relative to the
@@ -81,9 +81,9 @@
 root_doc = "index"
 
 # General information about the project.
-project = u"google-cloud-asset"
-copyright = u"2025, Google, LLC"
-author = u"Google APIs"
+project = "google-cloud-asset"
+copyright = "2025, Google, LLC"
+author = "Google APIs"
 
 # The version info for the project you're documenting, acts as replacement for
 # |version| and |release|, also used in various other places throughout the
@@ -283,7 +283,7 @@
     (
         root_doc,
         "google-cloud-asset.tex",
-        u"google-cloud-asset Documentation",
+        "google-cloud-asset Documentation",
         author,
         "manual",
     )
diff --git a/tests/integration/goldens/asset/google/cloud/asset/__init__.py b/tests/integration/goldens/asset/google/cloud/asset/__init__.py
index fd9404f4b6..69c41a3004 100755
--- a/tests/integration/goldens/asset/google/cloud/asset/__init__.py
+++ b/tests/integration/goldens/asset/google/cloud/asset/__init__.py
@@ -18,164 +18,170 @@
 __version__ = package_version.__version__
 
 
+from google.cloud.asset_v1.services.asset_service.async_client import (
+    AssetServiceAsyncClient,
+)
 from google.cloud.asset_v1.services.asset_service.client import AssetServiceClient
-from google.cloud.asset_v1.services.asset_service.async_client import AssetServiceAsyncClient
-
-from google.cloud.asset_v1.types.asset_service import AnalyzeIamPolicyLongrunningMetadata
-from google.cloud.asset_v1.types.asset_service import AnalyzeIamPolicyLongrunningRequest
-from google.cloud.asset_v1.types.asset_service import AnalyzeIamPolicyLongrunningResponse
-from google.cloud.asset_v1.types.asset_service import AnalyzeIamPolicyRequest
-from google.cloud.asset_v1.types.asset_service import AnalyzeIamPolicyResponse
-from google.cloud.asset_v1.types.asset_service import AnalyzeMoveRequest
-from google.cloud.asset_v1.types.asset_service import AnalyzeMoveResponse
-from google.cloud.asset_v1.types.asset_service import AnalyzeOrgPoliciesRequest
-from google.cloud.asset_v1.types.asset_service import AnalyzeOrgPoliciesResponse
-from google.cloud.asset_v1.types.asset_service import AnalyzeOrgPolicyGovernedAssetsRequest
-from google.cloud.asset_v1.types.asset_service import AnalyzeOrgPolicyGovernedAssetsResponse
-from google.cloud.asset_v1.types.asset_service import AnalyzeOrgPolicyGovernedContainersRequest
-from google.cloud.asset_v1.types.asset_service import AnalyzeOrgPolicyGovernedContainersResponse
-from google.cloud.asset_v1.types.asset_service import AnalyzerOrgPolicy
-from google.cloud.asset_v1.types.asset_service import AnalyzerOrgPolicyConstraint
-from google.cloud.asset_v1.types.asset_service import BatchGetAssetsHistoryRequest
-from google.cloud.asset_v1.types.asset_service import BatchGetAssetsHistoryResponse
-from google.cloud.asset_v1.types.asset_service import BatchGetEffectiveIamPoliciesRequest
-from google.cloud.asset_v1.types.asset_service import BatchGetEffectiveIamPoliciesResponse
-from google.cloud.asset_v1.types.asset_service import BigQueryDestination
-from google.cloud.asset_v1.types.asset_service import CreateFeedRequest
-from google.cloud.asset_v1.types.asset_service import CreateSavedQueryRequest
-from google.cloud.asset_v1.types.asset_service import DeleteFeedRequest
-from google.cloud.asset_v1.types.asset_service import DeleteSavedQueryRequest
-from google.cloud.asset_v1.types.asset_service import ExportAssetsRequest
-from google.cloud.asset_v1.types.asset_service import ExportAssetsResponse
-from google.cloud.asset_v1.types.asset_service import Feed
-from google.cloud.asset_v1.types.asset_service import FeedOutputConfig
-from google.cloud.asset_v1.types.asset_service import GcsDestination
-from google.cloud.asset_v1.types.asset_service import GcsOutputResult
-from google.cloud.asset_v1.types.asset_service import GetFeedRequest
-from google.cloud.asset_v1.types.asset_service import GetSavedQueryRequest
-from google.cloud.asset_v1.types.asset_service import IamPolicyAnalysisOutputConfig
-from google.cloud.asset_v1.types.asset_service import IamPolicyAnalysisQuery
-from google.cloud.asset_v1.types.asset_service import ListAssetsRequest
-from google.cloud.asset_v1.types.asset_service import ListAssetsResponse
-from google.cloud.asset_v1.types.asset_service import ListFeedsRequest
-from google.cloud.asset_v1.types.asset_service import ListFeedsResponse
-from google.cloud.asset_v1.types.asset_service import ListSavedQueriesRequest
-from google.cloud.asset_v1.types.asset_service import ListSavedQueriesResponse
-from google.cloud.asset_v1.types.asset_service import MoveAnalysis
-from google.cloud.asset_v1.types.asset_service import MoveAnalysisResult
-from google.cloud.asset_v1.types.asset_service import MoveImpact
-from google.cloud.asset_v1.types.asset_service import OutputConfig
-from google.cloud.asset_v1.types.asset_service import OutputResult
-from google.cloud.asset_v1.types.asset_service import PartitionSpec
-from google.cloud.asset_v1.types.asset_service import PubsubDestination
-from google.cloud.asset_v1.types.asset_service import QueryAssetsOutputConfig
-from google.cloud.asset_v1.types.asset_service import QueryAssetsRequest
-from google.cloud.asset_v1.types.asset_service import QueryAssetsResponse
-from google.cloud.asset_v1.types.asset_service import QueryResult
-from google.cloud.asset_v1.types.asset_service import SavedQuery
-from google.cloud.asset_v1.types.asset_service import SearchAllIamPoliciesRequest
-from google.cloud.asset_v1.types.asset_service import SearchAllIamPoliciesResponse
-from google.cloud.asset_v1.types.asset_service import SearchAllResourcesRequest
-from google.cloud.asset_v1.types.asset_service import SearchAllResourcesResponse
-from google.cloud.asset_v1.types.asset_service import TableFieldSchema
-from google.cloud.asset_v1.types.asset_service import TableSchema
-from google.cloud.asset_v1.types.asset_service import UpdateFeedRequest
-from google.cloud.asset_v1.types.asset_service import UpdateSavedQueryRequest
-from google.cloud.asset_v1.types.asset_service import ContentType
-from google.cloud.asset_v1.types.assets import Asset
-from google.cloud.asset_v1.types.assets import AttachedResource
-from google.cloud.asset_v1.types.assets import ConditionEvaluation
-from google.cloud.asset_v1.types.assets import IamPolicyAnalysisResult
-from google.cloud.asset_v1.types.assets import IamPolicyAnalysisState
-from google.cloud.asset_v1.types.assets import IamPolicySearchResult
-from google.cloud.asset_v1.types.assets import RelatedAsset
-from google.cloud.asset_v1.types.assets import RelatedAssets
-from google.cloud.asset_v1.types.assets import RelatedResource
-from google.cloud.asset_v1.types.assets import RelatedResources
-from google.cloud.asset_v1.types.assets import RelationshipAttributes
-from google.cloud.asset_v1.types.assets import Resource
-from google.cloud.asset_v1.types.assets import ResourceSearchResult
-from google.cloud.asset_v1.types.assets import TemporalAsset
-from google.cloud.asset_v1.types.assets import TimeWindow
-from google.cloud.asset_v1.types.assets import VersionedResource
+from google.cloud.asset_v1.types.asset_service import (
+    AnalyzeIamPolicyLongrunningMetadata,
+    AnalyzeIamPolicyLongrunningRequest,
+    AnalyzeIamPolicyLongrunningResponse,
+    AnalyzeIamPolicyRequest,
+    AnalyzeIamPolicyResponse,
+    AnalyzeMoveRequest,
+    AnalyzeMoveResponse,
+    AnalyzeOrgPoliciesRequest,
+    AnalyzeOrgPoliciesResponse,
+    AnalyzeOrgPolicyGovernedAssetsRequest,
+    AnalyzeOrgPolicyGovernedAssetsResponse,
+    AnalyzeOrgPolicyGovernedContainersRequest,
+    AnalyzeOrgPolicyGovernedContainersResponse,
+    AnalyzerOrgPolicy,
+    AnalyzerOrgPolicyConstraint,
+    BatchGetAssetsHistoryRequest,
+    BatchGetAssetsHistoryResponse,
+    BatchGetEffectiveIamPoliciesRequest,
+    BatchGetEffectiveIamPoliciesResponse,
+    BigQueryDestination,
+    ContentType,
+    CreateFeedRequest,
+    CreateSavedQueryRequest,
+    DeleteFeedRequest,
+    DeleteSavedQueryRequest,
+    ExportAssetsRequest,
+    ExportAssetsResponse,
+    Feed,
+    FeedOutputConfig,
+    GcsDestination,
+    GcsOutputResult,
+    GetFeedRequest,
+    GetSavedQueryRequest,
+    IamPolicyAnalysisOutputConfig,
+    IamPolicyAnalysisQuery,
+    ListAssetsRequest,
+    ListAssetsResponse,
+    ListFeedsRequest,
+    ListFeedsResponse,
+    ListSavedQueriesRequest,
+    ListSavedQueriesResponse,
+    MoveAnalysis,
+    MoveAnalysisResult,
+    MoveImpact,
+    OutputConfig,
+    OutputResult,
+    PartitionSpec,
+    PubsubDestination,
+    QueryAssetsOutputConfig,
+    QueryAssetsRequest,
+    QueryAssetsResponse,
+    QueryResult,
+    SavedQuery,
+    SearchAllIamPoliciesRequest,
+    SearchAllIamPoliciesResponse,
+    SearchAllResourcesRequest,
+    SearchAllResourcesResponse,
+    TableFieldSchema,
+    TableSchema,
+    UpdateFeedRequest,
+    UpdateSavedQueryRequest,
+)
+from google.cloud.asset_v1.types.assets import (
+    Asset,
+    AttachedResource,
+    ConditionEvaluation,
+    IamPolicyAnalysisResult,
+    IamPolicyAnalysisState,
+    IamPolicySearchResult,
+    RelatedAsset,
+    RelatedAssets,
+    RelatedResource,
+    RelatedResources,
+    RelationshipAttributes,
+    Resource,
+    ResourceSearchResult,
+    TemporalAsset,
+    TimeWindow,
+    VersionedResource,
+)
 
-__all__ = ('AssetServiceClient',
-    'AssetServiceAsyncClient',
-    'AnalyzeIamPolicyLongrunningMetadata',
-    'AnalyzeIamPolicyLongrunningRequest',
-    'AnalyzeIamPolicyLongrunningResponse',
-    'AnalyzeIamPolicyRequest',
-    'AnalyzeIamPolicyResponse',
-    'AnalyzeMoveRequest',
-    'AnalyzeMoveResponse',
-    'AnalyzeOrgPoliciesRequest',
-    'AnalyzeOrgPoliciesResponse',
-    'AnalyzeOrgPolicyGovernedAssetsRequest',
-    'AnalyzeOrgPolicyGovernedAssetsResponse',
-    'AnalyzeOrgPolicyGovernedContainersRequest',
-    'AnalyzeOrgPolicyGovernedContainersResponse',
-    'AnalyzerOrgPolicy',
-    'AnalyzerOrgPolicyConstraint',
-    'BatchGetAssetsHistoryRequest',
-    'BatchGetAssetsHistoryResponse',
-    'BatchGetEffectiveIamPoliciesRequest',
-    'BatchGetEffectiveIamPoliciesResponse',
-    'BigQueryDestination',
-    'CreateFeedRequest',
-    'CreateSavedQueryRequest',
-    'DeleteFeedRequest',
-    'DeleteSavedQueryRequest',
-    'ExportAssetsRequest',
-    'ExportAssetsResponse',
-    'Feed',
-    'FeedOutputConfig',
-    'GcsDestination',
-    'GcsOutputResult',
-    'GetFeedRequest',
-    'GetSavedQueryRequest',
-    'IamPolicyAnalysisOutputConfig',
-    'IamPolicyAnalysisQuery',
-    'ListAssetsRequest',
-    'ListAssetsResponse',
-    'ListFeedsRequest',
-    'ListFeedsResponse',
-    'ListSavedQueriesRequest',
-    'ListSavedQueriesResponse',
-    'MoveAnalysis',
-    'MoveAnalysisResult',
-    'MoveImpact',
-    'OutputConfig',
-    'OutputResult',
-    'PartitionSpec',
-    'PubsubDestination',
-    'QueryAssetsOutputConfig',
-    'QueryAssetsRequest',
-    'QueryAssetsResponse',
-    'QueryResult',
-    'SavedQuery',
-    'SearchAllIamPoliciesRequest',
-    'SearchAllIamPoliciesResponse',
-    'SearchAllResourcesRequest',
-    'SearchAllResourcesResponse',
-    'TableFieldSchema',
-    'TableSchema',
-    'UpdateFeedRequest',
-    'UpdateSavedQueryRequest',
-    'ContentType',
-    'Asset',
-    'AttachedResource',
-    'ConditionEvaluation',
-    'IamPolicyAnalysisResult',
-    'IamPolicyAnalysisState',
-    'IamPolicySearchResult',
-    'RelatedAsset',
-    'RelatedAssets',
-    'RelatedResource',
-    'RelatedResources',
-    'RelationshipAttributes',
-    'Resource',
-    'ResourceSearchResult',
-    'TemporalAsset',
-    'TimeWindow',
-    'VersionedResource',
+__all__ = (
+    "AssetServiceClient",
+    "AssetServiceAsyncClient",
+    "AnalyzeIamPolicyLongrunningMetadata",
+    "AnalyzeIamPolicyLongrunningRequest",
+    "AnalyzeIamPolicyLongrunningResponse",
+    "AnalyzeIamPolicyRequest",
+    "AnalyzeIamPolicyResponse",
+    "AnalyzeMoveRequest",
+    "AnalyzeMoveResponse",
+    "AnalyzeOrgPoliciesRequest",
+    "AnalyzeOrgPoliciesResponse",
+    "AnalyzeOrgPolicyGovernedAssetsRequest",
+    "AnalyzeOrgPolicyGovernedAssetsResponse",
+    "AnalyzeOrgPolicyGovernedContainersRequest",
+    "AnalyzeOrgPolicyGovernedContainersResponse",
+    "AnalyzerOrgPolicy",
+    "AnalyzerOrgPolicyConstraint",
+    "BatchGetAssetsHistoryRequest",
+    "BatchGetAssetsHistoryResponse",
+    "BatchGetEffectiveIamPoliciesRequest",
+    "BatchGetEffectiveIamPoliciesResponse",
+    "BigQueryDestination",
+    "CreateFeedRequest",
+    "CreateSavedQueryRequest",
+    "DeleteFeedRequest",
+    "DeleteSavedQueryRequest",
+    "ExportAssetsRequest",
+    "ExportAssetsResponse",
+    "Feed",
+    "FeedOutputConfig",
+    "GcsDestination",
+    "GcsOutputResult",
+    "GetFeedRequest",
+    "GetSavedQueryRequest",
+    "IamPolicyAnalysisOutputConfig",
+    "IamPolicyAnalysisQuery",
+    "ListAssetsRequest",
+    "ListAssetsResponse",
+    "ListFeedsRequest",
+    "ListFeedsResponse",
+    "ListSavedQueriesRequest",
+    "ListSavedQueriesResponse",
+    "MoveAnalysis",
+    "MoveAnalysisResult",
+    "MoveImpact",
+    "OutputConfig",
+    "OutputResult",
+    "PartitionSpec",
+    "PubsubDestination",
+    "QueryAssetsOutputConfig",
+    "QueryAssetsRequest",
+    "QueryAssetsResponse",
+    "QueryResult",
+    "SavedQuery",
+    "SearchAllIamPoliciesRequest",
+    "SearchAllIamPoliciesResponse",
+    "SearchAllResourcesRequest",
+    "SearchAllResourcesResponse",
+    "TableFieldSchema",
+    "TableSchema",
+    "UpdateFeedRequest",
+    "UpdateSavedQueryRequest",
+    "ContentType",
+    "Asset",
+    "AttachedResource",
+    "ConditionEvaluation",
+    "IamPolicyAnalysisResult",
+    "IamPolicyAnalysisState",
+    "IamPolicySearchResult",
+    "RelatedAsset",
+    "RelatedAssets",
+    "RelatedResource",
+    "RelatedResources",
+    "RelationshipAttributes",
+    "Resource",
+    "ResourceSearchResult",
+    "TemporalAsset",
+    "TimeWindow",
+    "VersionedResource",
 )
diff --git a/tests/integration/goldens/asset/google/cloud/asset_v1/__init__.py b/tests/integration/goldens/asset/google/cloud/asset_v1/__init__.py
index 31068ac472..1f156e9cba 100755
--- a/tests/integration/goldens/asset/google/cloud/asset_v1/__init__.py
+++ b/tests/integration/goldens/asset/google/cloud/asset_v1/__init__.py
@@ -13,10 +13,11 @@
 # See the License for the specific language governing permissions and
 # limitations under the
License. # -from google.cloud.asset_v1 import gapic_version as package_version +import sys import google.api_core as api_core -import sys + +from google.cloud.asset_v1 import gapic_version as package_version __version__ = package_version.__version__ @@ -27,115 +28,122 @@ # this code path once we drop support for Python 3.7 import importlib_metadata as metadata +from .services.asset_service import AssetServiceAsyncClient, AssetServiceClient +from .types.asset_service import ( + AnalyzeIamPolicyLongrunningMetadata, + AnalyzeIamPolicyLongrunningRequest, + AnalyzeIamPolicyLongrunningResponse, + AnalyzeIamPolicyRequest, + AnalyzeIamPolicyResponse, + AnalyzeMoveRequest, + AnalyzeMoveResponse, + AnalyzeOrgPoliciesRequest, + AnalyzeOrgPoliciesResponse, + AnalyzeOrgPolicyGovernedAssetsRequest, + AnalyzeOrgPolicyGovernedAssetsResponse, + AnalyzeOrgPolicyGovernedContainersRequest, + AnalyzeOrgPolicyGovernedContainersResponse, + AnalyzerOrgPolicy, + AnalyzerOrgPolicyConstraint, + BatchGetAssetsHistoryRequest, + BatchGetAssetsHistoryResponse, + BatchGetEffectiveIamPoliciesRequest, + BatchGetEffectiveIamPoliciesResponse, + BigQueryDestination, + ContentType, + CreateFeedRequest, + CreateSavedQueryRequest, + DeleteFeedRequest, + DeleteSavedQueryRequest, + ExportAssetsRequest, + ExportAssetsResponse, + Feed, + FeedOutputConfig, + GcsDestination, + GcsOutputResult, + GetFeedRequest, + GetSavedQueryRequest, + IamPolicyAnalysisOutputConfig, + IamPolicyAnalysisQuery, + ListAssetsRequest, + ListAssetsResponse, + ListFeedsRequest, + ListFeedsResponse, + ListSavedQueriesRequest, + ListSavedQueriesResponse, + MoveAnalysis, + MoveAnalysisResult, + MoveImpact, + OutputConfig, + OutputResult, + PartitionSpec, + PubsubDestination, + QueryAssetsOutputConfig, + QueryAssetsRequest, + QueryAssetsResponse, + QueryResult, + SavedQuery, + SearchAllIamPoliciesRequest, + SearchAllIamPoliciesResponse, + SearchAllResourcesRequest, + SearchAllResourcesResponse, + TableFieldSchema, + TableSchema, + UpdateFeedRequest, + UpdateSavedQueryRequest, +) +from .types.assets import ( + Asset, + AttachedResource, + ConditionEvaluation, + IamPolicyAnalysisResult, + IamPolicyAnalysisState, + IamPolicySearchResult, + RelatedAsset, + RelatedAssets, + RelatedResource, + RelatedResources, + RelationshipAttributes, + Resource, + ResourceSearchResult, + TemporalAsset, + TimeWindow, + VersionedResource, +) -from .services.asset_service import AssetServiceClient -from .services.asset_service import AssetServiceAsyncClient - -from .types.asset_service import AnalyzeIamPolicyLongrunningMetadata -from .types.asset_service import AnalyzeIamPolicyLongrunningRequest -from .types.asset_service import AnalyzeIamPolicyLongrunningResponse -from .types.asset_service import AnalyzeIamPolicyRequest -from .types.asset_service import AnalyzeIamPolicyResponse -from .types.asset_service import AnalyzeMoveRequest -from .types.asset_service import AnalyzeMoveResponse -from .types.asset_service import AnalyzeOrgPoliciesRequest -from .types.asset_service import AnalyzeOrgPoliciesResponse -from .types.asset_service import AnalyzeOrgPolicyGovernedAssetsRequest -from .types.asset_service import AnalyzeOrgPolicyGovernedAssetsResponse -from .types.asset_service import AnalyzeOrgPolicyGovernedContainersRequest -from .types.asset_service import AnalyzeOrgPolicyGovernedContainersResponse -from .types.asset_service import AnalyzerOrgPolicy -from .types.asset_service import AnalyzerOrgPolicyConstraint -from .types.asset_service import BatchGetAssetsHistoryRequest -from 
.types.asset_service import BatchGetAssetsHistoryResponse -from .types.asset_service import BatchGetEffectiveIamPoliciesRequest -from .types.asset_service import BatchGetEffectiveIamPoliciesResponse -from .types.asset_service import BigQueryDestination -from .types.asset_service import CreateFeedRequest -from .types.asset_service import CreateSavedQueryRequest -from .types.asset_service import DeleteFeedRequest -from .types.asset_service import DeleteSavedQueryRequest -from .types.asset_service import ExportAssetsRequest -from .types.asset_service import ExportAssetsResponse -from .types.asset_service import Feed -from .types.asset_service import FeedOutputConfig -from .types.asset_service import GcsDestination -from .types.asset_service import GcsOutputResult -from .types.asset_service import GetFeedRequest -from .types.asset_service import GetSavedQueryRequest -from .types.asset_service import IamPolicyAnalysisOutputConfig -from .types.asset_service import IamPolicyAnalysisQuery -from .types.asset_service import ListAssetsRequest -from .types.asset_service import ListAssetsResponse -from .types.asset_service import ListFeedsRequest -from .types.asset_service import ListFeedsResponse -from .types.asset_service import ListSavedQueriesRequest -from .types.asset_service import ListSavedQueriesResponse -from .types.asset_service import MoveAnalysis -from .types.asset_service import MoveAnalysisResult -from .types.asset_service import MoveImpact -from .types.asset_service import OutputConfig -from .types.asset_service import OutputResult -from .types.asset_service import PartitionSpec -from .types.asset_service import PubsubDestination -from .types.asset_service import QueryAssetsOutputConfig -from .types.asset_service import QueryAssetsRequest -from .types.asset_service import QueryAssetsResponse -from .types.asset_service import QueryResult -from .types.asset_service import SavedQuery -from .types.asset_service import SearchAllIamPoliciesRequest -from .types.asset_service import SearchAllIamPoliciesResponse -from .types.asset_service import SearchAllResourcesRequest -from .types.asset_service import SearchAllResourcesResponse -from .types.asset_service import TableFieldSchema -from .types.asset_service import TableSchema -from .types.asset_service import UpdateFeedRequest -from .types.asset_service import UpdateSavedQueryRequest -from .types.asset_service import ContentType -from .types.assets import Asset -from .types.assets import AttachedResource -from .types.assets import ConditionEvaluation -from .types.assets import IamPolicyAnalysisResult -from .types.assets import IamPolicyAnalysisState -from .types.assets import IamPolicySearchResult -from .types.assets import RelatedAsset -from .types.assets import RelatedAssets -from .types.assets import RelatedResource -from .types.assets import RelatedResources -from .types.assets import RelationshipAttributes -from .types.assets import Resource -from .types.assets import ResourceSearchResult -from .types.assets import TemporalAsset -from .types.assets import TimeWindow -from .types.assets import VersionedResource - -if hasattr(api_core, "check_python_version") and hasattr(api_core, "check_dependency_versions"): # pragma: NO COVER - api_core.check_python_version("google.cloud.asset_v1") # type: ignore - api_core.check_dependency_versions("google.cloud.asset_v1") # type: ignore -else: # pragma: NO COVER +if hasattr(api_core, "check_python_version") and hasattr( + api_core, "check_dependency_versions" +): # pragma: NO COVER + 
api_core.check_python_version("google.cloud.asset_v1") # type: ignore + api_core.check_dependency_versions("google.cloud.asset_v1") # type: ignore +else: # pragma: NO COVER # An older version of api_core is installed which does not define the # functions above. We do equivalent checks manually. try: - import warnings import sys + import warnings _py_version_str = sys.version.split()[0] _package_label = "google.cloud.asset_v1" if sys.version_info < (3, 9): - warnings.warn("You are using a non-supported Python version " + - f"({_py_version_str}). Google will not post any further " + - f"updates to {_package_label} supporting this Python version. " + - "Please upgrade to the latest Python version, or at " + - f"least to Python 3.9, and then update {_package_label}.", - FutureWarning) + warnings.warn( + "You are using a non-supported Python version " + + f"({_py_version_str}). Google will not post any further " + + f"updates to {_package_label} supporting this Python version. " + + "Please upgrade to the latest Python version, or at " + + f"least to Python 3.9, and then update {_package_label}.", + FutureWarning, + ) if sys.version_info[:2] == (3, 9): - warnings.warn(f"You are using a Python version ({_py_version_str}) " + - f"which Google will stop supporting in {_package_label} in " + - "January 2026. Please " + - "upgrade to the latest Python version, or at " + - "least to Python 3.10, before then, and " + - f"then update {_package_label}.", - FutureWarning) + warnings.warn( + f"You are using a Python version ({_py_version_str}) " + + f"which Google will stop supporting in {_package_label} in " + + "January 2026. Please " + + "upgrade to the latest Python version, or at " + + "least to Python 3.10, before then, and " + + f"then update {_package_label}.", + FutureWarning, + ) def parse_version_to_tuple(version_string: str): """Safely converts a semantic version string to a comparable tuple of integers. @@ -173,104 +181,108 @@ def _get_version(dependency_name): _recommendation = " (we recommend 6.x)" (_version_used, _version_used_string) = _get_version(_dependency_package) if _version_used and _version_used < _next_supported_version_tuple: - warnings.warn(f"Package {_package_label} depends on " + - f"{_dependency_package}, currently installed at version " + - f"{_version_used_string}. Future updates to " + - f"{_package_label} will require {_dependency_package} at " + - f"version {_next_supported_version} or higher{_recommendation}." + - " Please ensure " + - "that either (a) your Python environment doesn't pin the " + - f"version of {_dependency_package}, so that updates to " + - f"{_package_label} can require the higher version, or " + - "(b) you manually update your Python environment to use at " + - f"least version {_next_supported_version} of " + - f"{_dependency_package}.", - FutureWarning) + warnings.warn( + f"Package {_package_label} depends on " + + f"{_dependency_package}, currently installed at version " + + f"{_version_used_string}. Future updates to " + + f"{_package_label} will require {_dependency_package} at " + + f"version {_next_supported_version} or higher{_recommendation}." 
+ + " Please ensure " + + "that either (a) your Python environment doesn't pin the " + + f"version of {_dependency_package}, so that updates to " + + f"{_package_label} can require the higher version, or " + + "(b) you manually update your Python environment to use at " + + f"least version {_next_supported_version} of " + + f"{_dependency_package}.", + FutureWarning, + ) except Exception: - warnings.warn("Could not determine the version of Python " + - "currently being used. To continue receiving " + - "updates for {_package_label}, ensure you are " + - "using a supported version of Python; see " + - "https://devguide.python.org/versions/") + warnings.warn( + "Could not determine the version of Python " + + "currently being used. To continue receiving " + + "updates for {_package_label}, ensure you are " + + "using a supported version of Python; see " + + "https://devguide.python.org/versions/" + ) __all__ = ( - 'AssetServiceAsyncClient', -'AnalyzeIamPolicyLongrunningMetadata', -'AnalyzeIamPolicyLongrunningRequest', -'AnalyzeIamPolicyLongrunningResponse', -'AnalyzeIamPolicyRequest', -'AnalyzeIamPolicyResponse', -'AnalyzeMoveRequest', -'AnalyzeMoveResponse', -'AnalyzeOrgPoliciesRequest', -'AnalyzeOrgPoliciesResponse', -'AnalyzeOrgPolicyGovernedAssetsRequest', -'AnalyzeOrgPolicyGovernedAssetsResponse', -'AnalyzeOrgPolicyGovernedContainersRequest', -'AnalyzeOrgPolicyGovernedContainersResponse', -'AnalyzerOrgPolicy', -'AnalyzerOrgPolicyConstraint', -'Asset', -'AssetServiceClient', -'AttachedResource', -'BatchGetAssetsHistoryRequest', -'BatchGetAssetsHistoryResponse', -'BatchGetEffectiveIamPoliciesRequest', -'BatchGetEffectiveIamPoliciesResponse', -'BigQueryDestination', -'ConditionEvaluation', -'ContentType', -'CreateFeedRequest', -'CreateSavedQueryRequest', -'DeleteFeedRequest', -'DeleteSavedQueryRequest', -'ExportAssetsRequest', -'ExportAssetsResponse', -'Feed', -'FeedOutputConfig', -'GcsDestination', -'GcsOutputResult', -'GetFeedRequest', -'GetSavedQueryRequest', -'IamPolicyAnalysisOutputConfig', -'IamPolicyAnalysisQuery', -'IamPolicyAnalysisResult', -'IamPolicyAnalysisState', -'IamPolicySearchResult', -'ListAssetsRequest', -'ListAssetsResponse', -'ListFeedsRequest', -'ListFeedsResponse', -'ListSavedQueriesRequest', -'ListSavedQueriesResponse', -'MoveAnalysis', -'MoveAnalysisResult', -'MoveImpact', -'OutputConfig', -'OutputResult', -'PartitionSpec', -'PubsubDestination', -'QueryAssetsOutputConfig', -'QueryAssetsRequest', -'QueryAssetsResponse', -'QueryResult', -'RelatedAsset', -'RelatedAssets', -'RelatedResource', -'RelatedResources', -'RelationshipAttributes', -'Resource', -'ResourceSearchResult', -'SavedQuery', -'SearchAllIamPoliciesRequest', -'SearchAllIamPoliciesResponse', -'SearchAllResourcesRequest', -'SearchAllResourcesResponse', -'TableFieldSchema', -'TableSchema', -'TemporalAsset', -'TimeWindow', -'UpdateFeedRequest', -'UpdateSavedQueryRequest', -'VersionedResource', + "AssetServiceAsyncClient", + "AnalyzeIamPolicyLongrunningMetadata", + "AnalyzeIamPolicyLongrunningRequest", + "AnalyzeIamPolicyLongrunningResponse", + "AnalyzeIamPolicyRequest", + "AnalyzeIamPolicyResponse", + "AnalyzeMoveRequest", + "AnalyzeMoveResponse", + "AnalyzeOrgPoliciesRequest", + "AnalyzeOrgPoliciesResponse", + "AnalyzeOrgPolicyGovernedAssetsRequest", + "AnalyzeOrgPolicyGovernedAssetsResponse", + "AnalyzeOrgPolicyGovernedContainersRequest", + "AnalyzeOrgPolicyGovernedContainersResponse", + "AnalyzerOrgPolicy", + "AnalyzerOrgPolicyConstraint", + "Asset", + "AssetServiceClient", + "AttachedResource", + 
"BatchGetAssetsHistoryRequest", + "BatchGetAssetsHistoryResponse", + "BatchGetEffectiveIamPoliciesRequest", + "BatchGetEffectiveIamPoliciesResponse", + "BigQueryDestination", + "ConditionEvaluation", + "ContentType", + "CreateFeedRequest", + "CreateSavedQueryRequest", + "DeleteFeedRequest", + "DeleteSavedQueryRequest", + "ExportAssetsRequest", + "ExportAssetsResponse", + "Feed", + "FeedOutputConfig", + "GcsDestination", + "GcsOutputResult", + "GetFeedRequest", + "GetSavedQueryRequest", + "IamPolicyAnalysisOutputConfig", + "IamPolicyAnalysisQuery", + "IamPolicyAnalysisResult", + "IamPolicyAnalysisState", + "IamPolicySearchResult", + "ListAssetsRequest", + "ListAssetsResponse", + "ListFeedsRequest", + "ListFeedsResponse", + "ListSavedQueriesRequest", + "ListSavedQueriesResponse", + "MoveAnalysis", + "MoveAnalysisResult", + "MoveImpact", + "OutputConfig", + "OutputResult", + "PartitionSpec", + "PubsubDestination", + "QueryAssetsOutputConfig", + "QueryAssetsRequest", + "QueryAssetsResponse", + "QueryResult", + "RelatedAsset", + "RelatedAssets", + "RelatedResource", + "RelatedResources", + "RelationshipAttributes", + "Resource", + "ResourceSearchResult", + "SavedQuery", + "SearchAllIamPoliciesRequest", + "SearchAllIamPoliciesResponse", + "SearchAllResourcesRequest", + "SearchAllResourcesResponse", + "TableFieldSchema", + "TableSchema", + "TemporalAsset", + "TimeWindow", + "UpdateFeedRequest", + "UpdateSavedQueryRequest", + "VersionedResource", ) diff --git a/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/__init__.py b/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/__init__.py index b35796f582..9ce0d49d8a 100755 --- a/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/__init__.py +++ b/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/__init__.py @@ -13,10 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. # -from .client import AssetServiceClient from .async_client import AssetServiceAsyncClient +from .client import AssetServiceClient __all__ = ( - 'AssetServiceClient', - 'AssetServiceAsyncClient', + "AssetServiceClient", + "AssetServiceAsyncClient", ) diff --git a/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py b/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py index 38379ce9d7..819a925f12 100755 --- a/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py +++ b/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py @@ -13,21 +13,31 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -import logging as std_logging from collections import OrderedDict +import logging as std_logging import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union - -from google.cloud.asset_v1 import gapic_version as package_version +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) -from google.api_core.client_options import ClientOptions from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import retry_async as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore import google.protobuf +from google.cloud.asset_v1 import gapic_version as package_version try: OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] @@ -36,26 +46,29 @@ from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore -from google.cloud.asset_v1.services.asset_service import pagers -from google.cloud.asset_v1.types import asset_service -from google.cloud.asset_v1.types import assets -from google.longrunning import operations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore from google.rpc import status_pb2 # type: ignore from google.type import expr_pb2 # type: ignore -from .transports.base import AssetServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import AssetServiceGrpcAsyncIOTransport + +from google.cloud.asset_v1.services.asset_service import pagers +from google.cloud.asset_v1.types import asset_service, assets + from .client import AssetServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, AssetServiceTransport +from .transports.grpc_asyncio import AssetServiceGrpcAsyncIOTransport try: from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False _LOGGER = std_logging.getLogger(__name__) + class AssetServiceAsyncClient: """Asset service definition.""" @@ -81,17 +94,29 @@ class AssetServiceAsyncClient: saved_query_path = staticmethod(AssetServiceClient.saved_query_path) parse_saved_query_path = staticmethod(AssetServiceClient.parse_saved_query_path) service_perimeter_path = staticmethod(AssetServiceClient.service_perimeter_path) - parse_service_perimeter_path = staticmethod(AssetServiceClient.parse_service_perimeter_path) - common_billing_account_path = staticmethod(AssetServiceClient.common_billing_account_path) - parse_common_billing_account_path = staticmethod(AssetServiceClient.parse_common_billing_account_path) + parse_service_perimeter_path = staticmethod( + AssetServiceClient.parse_service_perimeter_path + ) + common_billing_account_path = staticmethod( + AssetServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + AssetServiceClient.parse_common_billing_account_path + ) common_folder_path = staticmethod(AssetServiceClient.common_folder_path) parse_common_folder_path = 
staticmethod(AssetServiceClient.parse_common_folder_path) common_organization_path = staticmethod(AssetServiceClient.common_organization_path) - parse_common_organization_path = staticmethod(AssetServiceClient.parse_common_organization_path) + parse_common_organization_path = staticmethod( + AssetServiceClient.parse_common_organization_path + ) common_project_path = staticmethod(AssetServiceClient.common_project_path) - parse_common_project_path = staticmethod(AssetServiceClient.parse_common_project_path) + parse_common_project_path = staticmethod( + AssetServiceClient.parse_common_project_path + ) common_location_path = staticmethod(AssetServiceClient.common_location_path) - parse_common_location_path = staticmethod(AssetServiceClient.parse_common_location_path) + parse_common_location_path = staticmethod( + AssetServiceClient.parse_common_location_path + ) @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): @@ -127,7 +152,9 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): from_service_account_json = from_service_account_file @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): """Return the API endpoint and client cert source for mutual TLS. The client cert source is determined in the following order: @@ -190,12 +217,16 @@ def universe_domain(self) -> str: get_transport_class = AssetServiceClient.get_transport_class - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, AssetServiceTransport, Callable[..., AssetServiceTransport]]] = "grpc_asyncio", - client_options: Optional[ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[str, AssetServiceTransport, Callable[..., AssetServiceTransport]] + ] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: """Instantiates the asset service async client. 
Args: @@ -253,30 +284,38 @@ def __init__(self, *, transport=transport, client_options=client_options, client_info=client_info, - ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.asset_v1.AssetServiceAsyncClient`.", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", - "universeDomain": getattr(self._client._transport._credentials, "universe_domain", ""), + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._client._transport, "_credentials") else { + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { "serviceName": "google.cloud.asset.v1.AssetService", "credentialsType": None, - } + }, ) - async def export_assets(self, - request: Optional[Union[asset_service.ExportAssetsRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: + async def export_assets( + self, + request: Optional[Union[asset_service.ExportAssetsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: r"""Exports assets with time and resource types to a given Cloud Storage location/BigQuery table. For Cloud Storage location destinations, the output format is newline-delimited JSON. Each @@ -355,14 +394,14 @@ async def sample_export_assets(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.export_assets] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.export_assets + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -387,14 +426,15 @@ async def sample_export_assets(): # Done; return the response. 
return response - async def list_assets(self, - request: Optional[Union[asset_service.ListAssetsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListAssetsAsyncPager: + async def list_assets( + self, + request: Optional[Union[asset_service.ListAssetsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListAssetsAsyncPager: r"""Lists assets with time and resource types and returns paged results in response. @@ -461,10 +501,14 @@ async def sample_list_assets(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -478,14 +522,14 @@ async def sample_list_assets(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_assets] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_assets + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -513,13 +557,16 @@ async def sample_list_assets(): # Done; return the response. return response - async def batch_get_assets_history(self, - request: Optional[Union[asset_service.BatchGetAssetsHistoryRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> asset_service.BatchGetAssetsHistoryResponse: + async def batch_get_assets_history( + self, + request: Optional[ + Union[asset_service.BatchGetAssetsHistoryRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.BatchGetAssetsHistoryResponse: r"""Batch gets the update history of assets that overlap a time window. For IAM_POLICY content, this API outputs history when the asset and its attached IAM POLICY both exist. This can @@ -577,14 +624,14 @@ async def sample_batch_get_assets_history(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = self._client._transport._wrapped_methods[self._client._transport.batch_get_assets_history] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.batch_get_assets_history + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -601,14 +648,15 @@ async def sample_batch_get_assets_history(): # Done; return the response. return response - async def create_feed(self, - request: Optional[Union[asset_service.CreateFeedRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> asset_service.Feed: + async def create_feed( + self, + request: Optional[Union[asset_service.CreateFeedRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.Feed: r"""Creates a feed in a parent project/folder/organization to listen to its asset updates. @@ -685,10 +733,14 @@ async def sample_create_feed(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -702,14 +754,14 @@ async def sample_create_feed(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.create_feed] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_feed + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -726,14 +778,15 @@ async def sample_create_feed(): # Done; return the response. 
return response - async def get_feed(self, - request: Optional[Union[asset_service.GetFeedRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> asset_service.Feed: + async def get_feed( + self, + request: Optional[Union[asset_service.GetFeedRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.Feed: r"""Gets details about an asset feed. .. code-block:: python @@ -798,10 +851,14 @@ async def sample_get_feed(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -820,9 +877,7 @@ async def sample_get_feed(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -839,14 +894,15 @@ async def sample_get_feed(): # Done; return the response. return response - async def list_feeds(self, - request: Optional[Union[asset_service.ListFeedsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> asset_service.ListFeedsResponse: + async def list_feeds( + self, + request: Optional[Union[asset_service.ListFeedsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.ListFeedsResponse: r"""Lists all asset feeds in a parent project/folder/organization. @@ -906,10 +962,14 @@ async def sample_list_feeds(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -923,14 +983,14 @@ async def sample_list_feeds(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_feeds] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_feeds + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -947,14 +1007,15 @@ async def sample_list_feeds(): # Done; return the response. return response - async def update_feed(self, - request: Optional[Union[asset_service.UpdateFeedRequest, dict]] = None, - *, - feed: Optional[asset_service.Feed] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> asset_service.Feed: + async def update_feed( + self, + request: Optional[Union[asset_service.UpdateFeedRequest, dict]] = None, + *, + feed: Optional[asset_service.Feed] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.Feed: r"""Updates an asset feed configuration. .. code-block:: python @@ -1023,10 +1084,14 @@ async def sample_update_feed(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [feed] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1040,14 +1105,16 @@ async def sample_update_feed(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.update_feed] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_feed + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("feed.name", request.feed.name), - )), + gapic_v1.routing_header.to_grpc_metadata( + (("feed.name", request.feed.name),) + ), ) # Validate the universe domain. @@ -1064,14 +1131,15 @@ async def sample_update_feed(): # Done; return the response. 
return response - async def delete_feed(self, - request: Optional[Union[asset_service.DeleteFeedRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: + async def delete_feed( + self, + request: Optional[Union[asset_service.DeleteFeedRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: r"""Deletes an asset feed. .. code-block:: python @@ -1121,10 +1189,14 @@ async def sample_delete_feed(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1138,14 +1210,14 @@ async def sample_delete_feed(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.delete_feed] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_feed + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -1159,16 +1231,17 @@ async def sample_delete_feed(): metadata=metadata, ) - async def search_all_resources(self, - request: Optional[Union[asset_service.SearchAllResourcesRequest, dict]] = None, - *, - scope: Optional[str] = None, - query: Optional[str] = None, - asset_types: Optional[MutableSequence[str]] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.SearchAllResourcesAsyncPager: + async def search_all_resources( + self, + request: Optional[Union[asset_service.SearchAllResourcesRequest, dict]] = None, + *, + scope: Optional[str] = None, + query: Optional[str] = None, + asset_types: Optional[MutableSequence[str]] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.SearchAllResourcesAsyncPager: r"""Searches all Google Cloud resources within the specified scope, such as a project, folder, or organization. The caller must be granted the ``cloudasset.assets.searchAllResources`` permission @@ -1336,10 +1409,14 @@ async def sample_search_all_resources(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
flattened_params = [scope, query, asset_types] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1357,14 +1434,14 @@ async def sample_search_all_resources(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.search_all_resources] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.search_all_resources + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("scope", request.scope), - )), + gapic_v1.routing_header.to_grpc_metadata((("scope", request.scope),)), ) # Validate the universe domain. @@ -1392,15 +1469,18 @@ async def sample_search_all_resources(): # Done; return the response. return response - async def search_all_iam_policies(self, - request: Optional[Union[asset_service.SearchAllIamPoliciesRequest, dict]] = None, - *, - scope: Optional[str] = None, - query: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.SearchAllIamPoliciesAsyncPager: + async def search_all_iam_policies( + self, + request: Optional[ + Union[asset_service.SearchAllIamPoliciesRequest, dict] + ] = None, + *, + scope: Optional[str] = None, + query: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.SearchAllIamPoliciesAsyncPager: r"""Searches all IAM policies within the specified scope, such as a project, folder, or organization. The caller must be granted the ``cloudasset.assets.searchAllIamPolicies`` permission on the @@ -1530,10 +1610,14 @@ async def sample_search_all_iam_policies(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [scope, query] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1549,14 +1633,14 @@ async def sample_search_all_iam_policies(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = self._client._transport._wrapped_methods[self._client._transport.search_all_iam_policies] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.search_all_iam_policies + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("scope", request.scope), - )), + gapic_v1.routing_header.to_grpc_metadata((("scope", request.scope),)), ) # Validate the universe domain. @@ -1584,13 +1668,14 @@ async def sample_search_all_iam_policies(): # Done; return the response. return response - async def analyze_iam_policy(self, - request: Optional[Union[asset_service.AnalyzeIamPolicyRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> asset_service.AnalyzeIamPolicyResponse: + async def analyze_iam_policy( + self, + request: Optional[Union[asset_service.AnalyzeIamPolicyRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.AnalyzeIamPolicyResponse: r"""Analyzes IAM policies to answer which identities have what accesses on which resources. @@ -1649,14 +1734,16 @@ async def sample_analyze_iam_policy(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.analyze_iam_policy] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.analyze_iam_policy + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("analysis_query.scope", request.analysis_query.scope), - )), + gapic_v1.routing_header.to_grpc_metadata( + (("analysis_query.scope", request.analysis_query.scope),) + ), ) # Validate the universe domain. @@ -1673,13 +1760,16 @@ async def sample_analyze_iam_policy(): # Done; return the response. return response - async def analyze_iam_policy_longrunning(self, - request: Optional[Union[asset_service.AnalyzeIamPolicyLongrunningRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: + async def analyze_iam_policy_longrunning( + self, + request: Optional[ + Union[asset_service.AnalyzeIamPolicyLongrunningRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: r"""Analyzes IAM policies asynchronously to answer which identities have what accesses on which resources, and writes the analysis results to a Google Cloud Storage or a BigQuery destination. For @@ -1758,14 +1848,16 @@ async def sample_analyze_iam_policy_longrunning(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = self._client._transport._wrapped_methods[self._client._transport.analyze_iam_policy_longrunning] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.analyze_iam_policy_longrunning + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("analysis_query.scope", request.analysis_query.scope), - )), + gapic_v1.routing_header.to_grpc_metadata( + (("analysis_query.scope", request.analysis_query.scope),) + ), ) # Validate the universe domain. @@ -1790,13 +1882,14 @@ async def sample_analyze_iam_policy_longrunning(): # Done; return the response. return response - async def analyze_move(self, - request: Optional[Union[asset_service.AnalyzeMoveRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> asset_service.AnalyzeMoveResponse: + async def analyze_move( + self, + request: Optional[Union[asset_service.AnalyzeMoveRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.AnalyzeMoveResponse: r"""Analyze moving a resource to a specified destination without kicking off the actual move. The analysis is best effort depending on the user's permissions of @@ -1858,14 +1951,14 @@ async def sample_analyze_move(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.analyze_move] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.analyze_move + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("resource", request.resource), - )), + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), ) # Validate the universe domain. @@ -1882,13 +1975,14 @@ async def sample_analyze_move(): # Done; return the response. return response - async def query_assets(self, - request: Optional[Union[asset_service.QueryAssetsRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> asset_service.QueryAssetsResponse: + async def query_assets( + self, + request: Optional[Union[asset_service.QueryAssetsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.QueryAssetsResponse: r"""Issue a job that queries assets using a SQL statement compatible with `BigQuery Standard SQL `__. @@ -1956,14 +2050,14 @@ async def sample_query_assets(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.query_assets] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.query_assets + ] # Certain fields should be provided within the metadata header; # add these here. 
metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -1980,16 +2074,17 @@ async def sample_query_assets(): # Done; return the response. return response - async def create_saved_query(self, - request: Optional[Union[asset_service.CreateSavedQueryRequest, dict]] = None, - *, - parent: Optional[str] = None, - saved_query: Optional[asset_service.SavedQuery] = None, - saved_query_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> asset_service.SavedQuery: + async def create_saved_query( + self, + request: Optional[Union[asset_service.CreateSavedQueryRequest, dict]] = None, + *, + parent: Optional[str] = None, + saved_query: Optional[asset_service.SavedQuery] = None, + saved_query_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.SavedQuery: r"""Creates a saved query in a parent project/folder/organization. @@ -2075,10 +2170,14 @@ async def sample_create_saved_query(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [parent, saved_query, saved_query_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2096,14 +2195,14 @@ async def sample_create_saved_query(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.create_saved_query] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_saved_query + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -2120,14 +2219,15 @@ async def sample_create_saved_query(): # Done; return the response. 
return response - async def get_saved_query(self, - request: Optional[Union[asset_service.GetSavedQueryRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> asset_service.SavedQuery: + async def get_saved_query( + self, + request: Optional[Union[asset_service.GetSavedQueryRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.SavedQuery: r"""Gets details about a saved query. .. code-block:: python @@ -2188,10 +2288,14 @@ async def sample_get_saved_query(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2205,14 +2309,14 @@ async def sample_get_saved_query(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_saved_query] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_saved_query + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -2229,14 +2333,15 @@ async def sample_get_saved_query(): # Done; return the response. return response - async def list_saved_queries(self, - request: Optional[Union[asset_service.ListSavedQueriesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListSavedQueriesAsyncPager: + async def list_saved_queries( + self, + request: Optional[Union[asset_service.ListSavedQueriesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListSavedQueriesAsyncPager: r"""Lists all saved queries in a parent project/folder/organization. @@ -2303,10 +2408,14 @@ async def sample_list_saved_queries(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2320,14 +2429,14 @@ async def sample_list_saved_queries(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_saved_queries] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_saved_queries + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -2355,15 +2464,16 @@ async def sample_list_saved_queries(): # Done; return the response. return response - async def update_saved_query(self, - request: Optional[Union[asset_service.UpdateSavedQueryRequest, dict]] = None, - *, - saved_query: Optional[asset_service.SavedQuery] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> asset_service.SavedQuery: + async def update_saved_query( + self, + request: Optional[Union[asset_service.UpdateSavedQueryRequest, dict]] = None, + *, + saved_query: Optional[asset_service.SavedQuery] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.SavedQuery: r"""Updates a saved query. .. code-block:: python @@ -2432,10 +2542,14 @@ async def sample_update_saved_query(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [saved_query, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2451,14 +2565,16 @@ async def sample_update_saved_query(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = self._client._transport._wrapped_methods[self._client._transport.update_saved_query] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_saved_query + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("saved_query.name", request.saved_query.name), - )), + gapic_v1.routing_header.to_grpc_metadata( + (("saved_query.name", request.saved_query.name),) + ), ) # Validate the universe domain. @@ -2475,14 +2591,15 @@ async def sample_update_saved_query(): # Done; return the response. return response - async def delete_saved_query(self, - request: Optional[Union[asset_service.DeleteSavedQueryRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: + async def delete_saved_query( + self, + request: Optional[Union[asset_service.DeleteSavedQueryRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: r"""Deletes a saved query. .. code-block:: python @@ -2534,10 +2651,14 @@ async def sample_delete_saved_query(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2551,14 +2672,14 @@ async def sample_delete_saved_query(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.delete_saved_query] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_saved_query + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. 
@@ -2572,13 +2693,16 @@ async def sample_delete_saved_query(): metadata=metadata, ) - async def batch_get_effective_iam_policies(self, - request: Optional[Union[asset_service.BatchGetEffectiveIamPoliciesRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> asset_service.BatchGetEffectiveIamPoliciesResponse: + async def batch_get_effective_iam_policies( + self, + request: Optional[ + Union[asset_service.BatchGetEffectiveIamPoliciesRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.BatchGetEffectiveIamPoliciesResponse: r"""Gets effective IAM policies for a batch of resources. .. code-block:: python @@ -2634,14 +2758,14 @@ async def sample_batch_get_effective_iam_policies(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.batch_get_effective_iam_policies] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.batch_get_effective_iam_policies + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("scope", request.scope), - )), + gapic_v1.routing_header.to_grpc_metadata((("scope", request.scope),)), ) # Validate the universe domain. @@ -2658,16 +2782,17 @@ async def sample_batch_get_effective_iam_policies(): # Done; return the response. return response - async def analyze_org_policies(self, - request: Optional[Union[asset_service.AnalyzeOrgPoliciesRequest, dict]] = None, - *, - scope: Optional[str] = None, - constraint: Optional[str] = None, - filter: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.AnalyzeOrgPoliciesAsyncPager: + async def analyze_org_policies( + self, + request: Optional[Union[asset_service.AnalyzeOrgPoliciesRequest, dict]] = None, + *, + scope: Optional[str] = None, + constraint: Optional[str] = None, + filter: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.AnalyzeOrgPoliciesAsyncPager: r"""Analyzes organization policies under a scope. .. code-block:: python @@ -2757,10 +2882,14 @@ async def sample_analyze_org_policies(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [scope, constraint, filter] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. 
@@ -2778,14 +2907,14 @@ async def sample_analyze_org_policies(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.analyze_org_policies] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.analyze_org_policies + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("scope", request.scope), - )), + gapic_v1.routing_header.to_grpc_metadata((("scope", request.scope),)), ) # Validate the universe domain. @@ -2813,16 +2942,19 @@ async def sample_analyze_org_policies(): # Done; return the response. return response - async def analyze_org_policy_governed_containers(self, - request: Optional[Union[asset_service.AnalyzeOrgPolicyGovernedContainersRequest, dict]] = None, - *, - scope: Optional[str] = None, - constraint: Optional[str] = None, - filter: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.AnalyzeOrgPolicyGovernedContainersAsyncPager: + async def analyze_org_policy_governed_containers( + self, + request: Optional[ + Union[asset_service.AnalyzeOrgPolicyGovernedContainersRequest, dict] + ] = None, + *, + scope: Optional[str] = None, + constraint: Optional[str] = None, + filter: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.AnalyzeOrgPolicyGovernedContainersAsyncPager: r"""Analyzes organization policies governed containers (projects, folders or organization) under a scope. @@ -2912,14 +3044,20 @@ async def sample_analyze_org_policy_governed_containers(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [scope, constraint, filter] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, asset_service.AnalyzeOrgPolicyGovernedContainersRequest): + if not isinstance( + request, asset_service.AnalyzeOrgPolicyGovernedContainersRequest + ): request = asset_service.AnalyzeOrgPolicyGovernedContainersRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2933,14 +3071,14 @@ async def sample_analyze_org_policy_governed_containers(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.analyze_org_policy_governed_containers] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.analyze_org_policy_governed_containers + ] # Certain fields should be provided within the metadata header; # add these here. 
metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("scope", request.scope), - )), + gapic_v1.routing_header.to_grpc_metadata((("scope", request.scope),)), ) # Validate the universe domain. @@ -2968,16 +3106,19 @@ async def sample_analyze_org_policy_governed_containers(): # Done; return the response. return response - async def analyze_org_policy_governed_assets(self, - request: Optional[Union[asset_service.AnalyzeOrgPolicyGovernedAssetsRequest, dict]] = None, - *, - scope: Optional[str] = None, - constraint: Optional[str] = None, - filter: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.AnalyzeOrgPolicyGovernedAssetsAsyncPager: + async def analyze_org_policy_governed_assets( + self, + request: Optional[ + Union[asset_service.AnalyzeOrgPolicyGovernedAssetsRequest, dict] + ] = None, + *, + scope: Optional[str] = None, + constraint: Optional[str] = None, + filter: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.AnalyzeOrgPolicyGovernedAssetsAsyncPager: r"""Analyzes organization policies governed assets (Google Cloud resources or policies) under a scope. This RPC supports custom constraints and the following 10 canned constraints: @@ -3096,10 +3237,14 @@ async def sample_analyze_org_policy_governed_assets(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [scope, constraint, filter] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -3117,14 +3262,14 @@ async def sample_analyze_org_policy_governed_assets(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.analyze_org_policy_governed_assets] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.analyze_org_policy_governed_assets + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("scope", request.scope), - )), + gapic_v1.routing_header.to_grpc_metadata((("scope", request.scope),)), ) # Validate the universe domain. @@ -3190,8 +3335,7 @@ async def get_operation( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -3199,7 +3343,11 @@ async def get_operation( # Send the request. 
response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -3210,12 +3358,13 @@ async def __aenter__(self) -> "AssetServiceAsyncClient": async def __aexit__(self, exc_type, exc, tb): await self.transport.close() -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ -__all__ = ( - "AssetServiceAsyncClient", -) +__all__ = ("AssetServiceAsyncClient",) diff --git a/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py b/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py index 843790205a..ebb4538e18 100755 --- a/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py +++ b/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py @@ -19,22 +19,34 @@ import logging as std_logging import os import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) import warnings -from google.cloud.asset_v1 import gapic_version as package_version - from google.api_core import client_options as client_options_lib from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore import google.protobuf +from google.cloud.asset_v1 import gapic_version as package_version + try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER @@ -42,6 +54,7 @@ try: from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False @@ -50,15 +63,16 @@ from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore -from google.cloud.asset_v1.services.asset_service import pagers -from google.cloud.asset_v1.types import asset_service -from google.cloud.asset_v1.types import assets -from google.longrunning import operations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore from google.rpc 
import status_pb2 # type: ignore from google.type import expr_pb2 # type: ignore -from .transports.base import AssetServiceTransport, DEFAULT_CLIENT_INFO + +from google.cloud.asset_v1.services.asset_service import pagers +from google.cloud.asset_v1.types import asset_service, assets + +from .transports.base import DEFAULT_CLIENT_INFO, AssetServiceTransport from .transports.grpc import AssetServiceGrpcTransport from .transports.grpc_asyncio import AssetServiceGrpcAsyncIOTransport from .transports.rest import AssetServiceRestTransport @@ -71,14 +85,16 @@ class AssetServiceClientMeta(type): support objects (e.g. transport) without polluting the client instance objects. """ + _transport_registry = OrderedDict() # type: Dict[str, Type[AssetServiceTransport]] _transport_registry["grpc"] = AssetServiceGrpcTransport _transport_registry["grpc_asyncio"] = AssetServiceGrpcAsyncIOTransport _transport_registry["rest"] = AssetServiceRestTransport - def get_transport_class(cls, - label: Optional[str] = None, - ) -> Type[AssetServiceTransport]: + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[AssetServiceTransport]: """Returns an appropriate transport class. Args: @@ -150,14 +166,16 @@ def _use_client_cert_effective(): bool: whether client certificate should be used for mTLS Raises: ValueError: (If using a version of google-auth without should_use_client_cert and - GOOGLE_API_USE_CLIENT_CERTIFICATE is set to an unexpected value.) + GOOGLE_API_USE_CLIENT_CERTIFICATE is set to an unexpected value.) """ # check if google-auth version supports should_use_client_cert for automatic mTLS enablement if hasattr(mtls, "should_use_client_cert"): # pragma: NO COVER return mtls.should_use_client_cert() - else: # pragma: NO COVER + else: # pragma: NO COVER # if unsupported, fallback to reading from env var - use_client_cert_str = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() + use_client_cert_str = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() if use_client_cert_str not in ("true", "false"): raise ValueError( "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be" @@ -196,8 +214,7 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): Returns: AssetServiceClient: The constructed client. 
""" - credentials = service_account.Credentials.from_service_account_file( - filename) + credentials = service_account.Credentials.from_service_account_file(filename) kwargs["credentials"] = credentials return cls(*args, **kwargs) @@ -214,23 +231,36 @@ def transport(self) -> AssetServiceTransport: return self._transport @staticmethod - def access_level_path(access_policy: str,access_level: str,) -> str: + def access_level_path( + access_policy: str, + access_level: str, + ) -> str: """Returns a fully-qualified access_level string.""" - return "accessPolicies/{access_policy}/accessLevels/{access_level}".format(access_policy=access_policy, access_level=access_level, ) + return "accessPolicies/{access_policy}/accessLevels/{access_level}".format( + access_policy=access_policy, + access_level=access_level, + ) @staticmethod - def parse_access_level_path(path: str) -> Dict[str,str]: + def parse_access_level_path(path: str) -> Dict[str, str]: """Parses a access_level path into its component segments.""" - m = re.match(r"^accessPolicies/(?P.+?)/accessLevels/(?P.+?)$", path) + m = re.match( + r"^accessPolicies/(?P.+?)/accessLevels/(?P.+?)$", + path, + ) return m.groupdict() if m else {} @staticmethod - def access_policy_path(access_policy: str,) -> str: + def access_policy_path( + access_policy: str, + ) -> str: """Returns a fully-qualified access_policy string.""" - return "accessPolicies/{access_policy}".format(access_policy=access_policy, ) + return "accessPolicies/{access_policy}".format( + access_policy=access_policy, + ) @staticmethod - def parse_access_policy_path(path: str) -> Dict[str,str]: + def parse_access_policy_path(path: str) -> Dict[str, str]: """Parses a access_policy path into its component segments.""" m = re.match(r"^accessPolicies/(?P.+?)$", path) return m.groupdict() if m else {} @@ -241,112 +271,170 @@ def asset_path() -> str: return "*".format() @staticmethod - def parse_asset_path(path: str) -> Dict[str,str]: + def parse_asset_path(path: str) -> Dict[str, str]: """Parses a asset path into its component segments.""" m = re.match(r"^.*$", path) return m.groupdict() if m else {} @staticmethod - def feed_path(project: str,feed: str,) -> str: + def feed_path( + project: str, + feed: str, + ) -> str: """Returns a fully-qualified feed string.""" - return "projects/{project}/feeds/{feed}".format(project=project, feed=feed, ) + return "projects/{project}/feeds/{feed}".format( + project=project, + feed=feed, + ) @staticmethod - def parse_feed_path(path: str) -> Dict[str,str]: + def parse_feed_path(path: str) -> Dict[str, str]: """Parses a feed path into its component segments.""" m = re.match(r"^projects/(?P.+?)/feeds/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def inventory_path(project: str,location: str,instance: str,) -> str: + def inventory_path( + project: str, + location: str, + instance: str, + ) -> str: """Returns a fully-qualified inventory string.""" - return "projects/{project}/locations/{location}/instances/{instance}/inventory".format(project=project, location=location, instance=instance, ) + return "projects/{project}/locations/{location}/instances/{instance}/inventory".format( + project=project, + location=location, + instance=instance, + ) @staticmethod - def parse_inventory_path(path: str) -> Dict[str,str]: + def parse_inventory_path(path: str) -> Dict[str, str]: """Parses a inventory path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/instances/(?P.+?)/inventory$", path) + m = re.match( + 
r"^projects/(?P.+?)/locations/(?P.+?)/instances/(?P.+?)/inventory$", + path, + ) return m.groupdict() if m else {} @staticmethod - def saved_query_path(project: str,saved_query: str,) -> str: + def saved_query_path( + project: str, + saved_query: str, + ) -> str: """Returns a fully-qualified saved_query string.""" - return "projects/{project}/savedQueries/{saved_query}".format(project=project, saved_query=saved_query, ) + return "projects/{project}/savedQueries/{saved_query}".format( + project=project, + saved_query=saved_query, + ) @staticmethod - def parse_saved_query_path(path: str) -> Dict[str,str]: + def parse_saved_query_path(path: str) -> Dict[str, str]: """Parses a saved_query path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/savedQueries/(?P.+?)$", path) + m = re.match( + r"^projects/(?P.+?)/savedQueries/(?P.+?)$", path + ) return m.groupdict() if m else {} @staticmethod - def service_perimeter_path(access_policy: str,service_perimeter: str,) -> str: + def service_perimeter_path( + access_policy: str, + service_perimeter: str, + ) -> str: """Returns a fully-qualified service_perimeter string.""" - return "accessPolicies/{access_policy}/servicePerimeters/{service_perimeter}".format(access_policy=access_policy, service_perimeter=service_perimeter, ) + return "accessPolicies/{access_policy}/servicePerimeters/{service_perimeter}".format( + access_policy=access_policy, + service_perimeter=service_perimeter, + ) @staticmethod - def parse_service_perimeter_path(path: str) -> Dict[str,str]: + def parse_service_perimeter_path(path: str) -> Dict[str, str]: """Parses a service_perimeter path into its component segments.""" - m = re.match(r"^accessPolicies/(?P.+?)/servicePerimeters/(?P.+?)$", path) + m = re.match( + r"^accessPolicies/(?P.+?)/servicePerimeters/(?P.+?)$", + path, + ) return m.groupdict() if m else {} @staticmethod - def common_billing_account_path(billing_account: str, ) -> str: + def common_billing_account_path( + billing_account: str, + ) -> str: """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str,str]: + def parse_common_billing_account_path(path: str) -> Dict[str, str]: """Parse a billing_account path into its component segments.""" m = re.match(r"^billingAccounts/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_folder_path(folder: str, ) -> str: + def common_folder_path( + folder: str, + ) -> str: """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder, ) + return "folders/{folder}".format( + folder=folder, + ) @staticmethod - def parse_common_folder_path(path: str) -> Dict[str,str]: + def parse_common_folder_path(path: str) -> Dict[str, str]: """Parse a folder path into its component segments.""" m = re.match(r"^folders/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_organization_path(organization: str, ) -> str: + def common_organization_path( + organization: str, + ) -> str: """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization, ) + return "organizations/{organization}".format( + organization=organization, + ) @staticmethod - def parse_common_organization_path(path: str) -> Dict[str,str]: + def 
+    def parse_common_organization_path(path: str) -> Dict[str, str]:
         """Parse a organization path into its component segments."""
         m = re.match(r"^organizations/(?P<organization>.+?)$", path)
         return m.groupdict() if m else {}
 
     @staticmethod
-    def common_project_path(project: str, ) -> str:
+    def common_project_path(
+        project: str,
+    ) -> str:
         """Returns a fully-qualified project string."""
-        return "projects/{project}".format(project=project, )
+        return "projects/{project}".format(
+            project=project,
+        )
 
     @staticmethod
-    def parse_common_project_path(path: str) -> Dict[str,str]:
+    def parse_common_project_path(path: str) -> Dict[str, str]:
         """Parse a project path into its component segments."""
         m = re.match(r"^projects/(?P<project>.+?)$", path)
         return m.groupdict() if m else {}
 
     @staticmethod
-    def common_location_path(project: str, location: str, ) -> str:
+    def common_location_path(
+        project: str,
+        location: str,
+    ) -> str:
         """Returns a fully-qualified location string."""
-        return "projects/{project}/locations/{location}".format(project=project, location=location, )
+        return "projects/{project}/locations/{location}".format(
+            project=project,
+            location=location,
+        )
 
     @staticmethod
-    def parse_common_location_path(path: str) -> Dict[str,str]:
+    def parse_common_location_path(path: str) -> Dict[str, str]:
         """Parse a location path into its component segments."""
         m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
         return m.groupdict() if m else {}
 
     @classmethod
-    def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None):
+    def get_mtls_endpoint_and_cert_source(
+        cls, client_options: Optional[client_options_lib.ClientOptions] = None
+    ):
         """Deprecated. Return the API endpoint and client cert source for mutual TLS.
 
         The client cert source is determined in the following order:
@@ -378,14 +466,18 @@ def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_optio
             google.auth.exceptions.MutualTLSChannelError: If any errors happen.
         """
 
-        warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.",
-            DeprecationWarning)
+        warnings.warn(
+            "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.",
+            DeprecationWarning,
+        )
         if client_options is None:
            client_options = client_options_lib.ClientOptions()
         use_client_cert = AssetServiceClient._use_client_cert_effective()
         use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
         if use_mtls_endpoint not in ("auto", "never", "always"):
-            raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`")
+            raise MutualTLSChannelError(
+                "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
+            )
 
         # Figure out the client cert source to use.
         client_cert_source = None
@@ -398,7 +490,9 @@ def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_optio
         # Figure out which api endpoint to use.
if client_options.api_endpoint is not None: api_endpoint = client_options.api_endpoint - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): api_endpoint = cls.DEFAULT_MTLS_ENDPOINT else: api_endpoint = cls.DEFAULT_ENDPOINT @@ -423,7 +517,9 @@ def _read_environment_variables(): use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) return use_client_cert, use_mtls_endpoint, universe_domain_env @staticmethod @@ -446,7 +542,9 @@ def _get_client_cert_source(provided_cert_source, use_cert_flag): return client_cert_source @staticmethod - def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint): + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): """Return the API endpoint used by the client. Args: @@ -462,17 +560,25 @@ def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtl """ if api_override is not None: api_endpoint = api_override - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): _default_universe = AssetServiceClient._DEFAULT_UNIVERSE if universe_domain != _default_universe: - raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) api_endpoint = AssetServiceClient.DEFAULT_MTLS_ENDPOINT else: - api_endpoint = AssetServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) + api_endpoint = AssetServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) return api_endpoint @staticmethod - def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: """Return the universe domain used by the client. Args: @@ -508,15 +614,18 @@ def _validate_universe_domain(self): return True def _add_cred_info_for_auth_errors( - self, - error: core_exceptions.GoogleAPICallError + self, error: core_exceptions.GoogleAPICallError ) -> None: """Adds credential info string to error details for 401/403/404 errors. Args: error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. 
""" - if error.code not in [HTTPStatus.UNAUTHORIZED, HTTPStatus.FORBIDDEN, HTTPStatus.NOT_FOUND]: + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: return cred = self._transport._credentials @@ -549,12 +658,16 @@ def universe_domain(self) -> str: """ return self._universe_domain - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, AssetServiceTransport, Callable[..., AssetServiceTransport]]] = None, - client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[str, AssetServiceTransport, Callable[..., AssetServiceTransport]] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: """Instantiates the asset service client. Args: @@ -612,14 +725,24 @@ def __init__(self, *, self._client_options = client_options_lib.from_dict(self._client_options) if self._client_options is None: self._client_options = client_options_lib.ClientOptions() - self._client_options = cast(client_options_lib.ClientOptions, self._client_options) + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) - universe_domain_opt = getattr(self._client_options, 'universe_domain', None) + universe_domain_opt = getattr(self._client_options, "universe_domain", None) - self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = AssetServiceClient._read_environment_variables() - self._client_cert_source = AssetServiceClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert) - self._universe_domain = AssetServiceClient._get_universe_domain(universe_domain_opt, self._universe_domain_env) - self._api_endpoint = None # updated below, depending on `transport` + ( + self._use_client_cert, + self._use_mtls_endpoint, + self._universe_domain_env, + ) = AssetServiceClient._read_environment_variables() + self._client_cert_source = AssetServiceClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + self._universe_domain = AssetServiceClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) + self._api_endpoint = None # updated below, depending on `transport` # Initialize the universe domain validation. self._is_universe_domain_valid = False @@ -630,7 +753,9 @@ def __init__(self, *, api_key_value = getattr(self._client_options, "api_key", None) if api_key_value and credentials: - raise ValueError("client_options.api_key and credentials are mutually exclusive") + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) # Save or instantiate the transport. # Ordinarily, we provide the transport, but allowing a custom transport @@ -639,8 +764,10 @@ def __init__(self, *, if transport_provided: # transport is a AssetServiceTransport instance. if credentials or self._client_options.credentials_file or api_key_value: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." 
+ ) if self._client_options.scopes: raise ValueError( "When providing a transport instance, provide its scopes " @@ -649,20 +776,26 @@ def __init__(self, *, self._transport = cast(AssetServiceTransport, transport) self._api_endpoint = self._transport.host - self._api_endpoint = (self._api_endpoint or - AssetServiceClient._get_api_endpoint( - self._client_options.api_endpoint, - self._client_cert_source, - self._universe_domain, - self._use_mtls_endpoint)) + self._api_endpoint = self._api_endpoint or AssetServiceClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) if not transport_provided: import google.auth._default # type: ignore - if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): - credentials = google.auth._default.get_api_key_credentials(api_key_value) + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) - transport_init: Union[Type[AssetServiceTransport], Callable[..., AssetServiceTransport]] = ( + transport_init: Union[ + Type[AssetServiceTransport], Callable[..., AssetServiceTransport] + ] = ( AssetServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., AssetServiceTransport], transport) @@ -681,27 +814,36 @@ def __init__(self, *, ) if "async" not in str(self._transport): - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.asset_v1.AssetServiceClient`.", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", - "universeDomain": getattr(self._transport._credentials, "universe_domain", ""), + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._transport, "_credentials") else { + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._transport, "_credentials") + else { "serviceName": "google.cloud.asset.v1.AssetService", "credentialsType": None, - } + }, ) - def export_assets(self, - request: Optional[Union[asset_service.ExportAssetsRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: + def export_assets( + self, + request: Optional[Union[asset_service.ExportAssetsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: r"""Exports assets with time and resource types to a given Cloud Storage location/BigQuery table. For Cloud Storage location destinations, the output format is newline-delimited JSON. Each @@ -785,9 +927,7 @@ def sample_export_assets(): # Certain fields should be provided within the metadata header; # add these here. 
metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -812,14 +952,15 @@ def sample_export_assets(): # Done; return the response. return response - def list_assets(self, - request: Optional[Union[asset_service.ListAssetsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListAssetsPager: + def list_assets( + self, + request: Optional[Union[asset_service.ListAssetsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListAssetsPager: r"""Lists assets with time and resource types and returns paged results in response. @@ -886,10 +1027,14 @@ def sample_list_assets(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -907,9 +1052,7 @@ def sample_list_assets(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -937,13 +1080,16 @@ def sample_list_assets(): # Done; return the response. return response - def batch_get_assets_history(self, - request: Optional[Union[asset_service.BatchGetAssetsHistoryRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> asset_service.BatchGetAssetsHistoryResponse: + def batch_get_assets_history( + self, + request: Optional[ + Union[asset_service.BatchGetAssetsHistoryRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.BatchGetAssetsHistoryResponse: r"""Batch gets the update history of assets that overlap a time window. For IAM_POLICY content, this API outputs history when the asset and its attached IAM POLICY both exist. This can @@ -1006,9 +1152,7 @@ def sample_batch_get_assets_history(): # Certain fields should be provided within the metadata header; # add these here. 
metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -1025,14 +1169,15 @@ def sample_batch_get_assets_history(): # Done; return the response. return response - def create_feed(self, - request: Optional[Union[asset_service.CreateFeedRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> asset_service.Feed: + def create_feed( + self, + request: Optional[Union[asset_service.CreateFeedRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.Feed: r"""Creates a feed in a parent project/folder/organization to listen to its asset updates. @@ -1109,10 +1254,14 @@ def sample_create_feed(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1130,9 +1279,7 @@ def sample_create_feed(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -1149,14 +1296,15 @@ def sample_create_feed(): # Done; return the response. return response - def get_feed(self, - request: Optional[Union[asset_service.GetFeedRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> asset_service.Feed: + def get_feed( + self, + request: Optional[Union[asset_service.GetFeedRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.Feed: r"""Gets details about an asset feed. .. code-block:: python @@ -1221,10 +1369,14 @@ def sample_get_feed(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1242,9 +1394,7 @@ def sample_get_feed(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -1261,14 +1411,15 @@ def sample_get_feed(): # Done; return the response. return response - def list_feeds(self, - request: Optional[Union[asset_service.ListFeedsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> asset_service.ListFeedsResponse: + def list_feeds( + self, + request: Optional[Union[asset_service.ListFeedsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.ListFeedsResponse: r"""Lists all asset feeds in a parent project/folder/organization. @@ -1328,10 +1479,14 @@ def sample_list_feeds(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1349,9 +1504,7 @@ def sample_list_feeds(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -1368,14 +1521,15 @@ def sample_list_feeds(): # Done; return the response. 
return response - def update_feed(self, - request: Optional[Union[asset_service.UpdateFeedRequest, dict]] = None, - *, - feed: Optional[asset_service.Feed] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> asset_service.Feed: + def update_feed( + self, + request: Optional[Union[asset_service.UpdateFeedRequest, dict]] = None, + *, + feed: Optional[asset_service.Feed] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.Feed: r"""Updates an asset feed configuration. .. code-block:: python @@ -1444,10 +1598,14 @@ def sample_update_feed(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [feed] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1465,9 +1623,9 @@ def sample_update_feed(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("feed.name", request.feed.name), - )), + gapic_v1.routing_header.to_grpc_metadata( + (("feed.name", request.feed.name),) + ), ) # Validate the universe domain. @@ -1484,14 +1642,15 @@ def sample_update_feed(): # Done; return the response. return response - def delete_feed(self, - request: Optional[Union[asset_service.DeleteFeedRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: + def delete_feed( + self, + request: Optional[Union[asset_service.DeleteFeedRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: r"""Deletes an asset feed. .. code-block:: python @@ -1541,10 +1700,14 @@ def sample_delete_feed(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. 
@@ -1562,9 +1725,7 @@ def sample_delete_feed(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -1578,16 +1739,17 @@ def sample_delete_feed(): metadata=metadata, ) - def search_all_resources(self, - request: Optional[Union[asset_service.SearchAllResourcesRequest, dict]] = None, - *, - scope: Optional[str] = None, - query: Optional[str] = None, - asset_types: Optional[MutableSequence[str]] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.SearchAllResourcesPager: + def search_all_resources( + self, + request: Optional[Union[asset_service.SearchAllResourcesRequest, dict]] = None, + *, + scope: Optional[str] = None, + query: Optional[str] = None, + asset_types: Optional[MutableSequence[str]] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.SearchAllResourcesPager: r"""Searches all Google Cloud resources within the specified scope, such as a project, folder, or organization. The caller must be granted the ``cloudasset.assets.searchAllResources`` permission @@ -1755,10 +1917,14 @@ def sample_search_all_resources(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [scope, query, asset_types] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1780,9 +1946,7 @@ def sample_search_all_resources(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("scope", request.scope), - )), + gapic_v1.routing_header.to_grpc_metadata((("scope", request.scope),)), ) # Validate the universe domain. @@ -1810,15 +1974,18 @@ def sample_search_all_resources(): # Done; return the response. 
return response - def search_all_iam_policies(self, - request: Optional[Union[asset_service.SearchAllIamPoliciesRequest, dict]] = None, - *, - scope: Optional[str] = None, - query: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.SearchAllIamPoliciesPager: + def search_all_iam_policies( + self, + request: Optional[ + Union[asset_service.SearchAllIamPoliciesRequest, dict] + ] = None, + *, + scope: Optional[str] = None, + query: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.SearchAllIamPoliciesPager: r"""Searches all IAM policies within the specified scope, such as a project, folder, or organization. The caller must be granted the ``cloudasset.assets.searchAllIamPolicies`` permission on the @@ -1948,10 +2115,14 @@ def sample_search_all_iam_policies(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [scope, query] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1971,9 +2142,7 @@ def sample_search_all_iam_policies(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("scope", request.scope), - )), + gapic_v1.routing_header.to_grpc_metadata((("scope", request.scope),)), ) # Validate the universe domain. @@ -2001,13 +2170,14 @@ def sample_search_all_iam_policies(): # Done; return the response. return response - def analyze_iam_policy(self, - request: Optional[Union[asset_service.AnalyzeIamPolicyRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> asset_service.AnalyzeIamPolicyResponse: + def analyze_iam_policy( + self, + request: Optional[Union[asset_service.AnalyzeIamPolicyRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.AnalyzeIamPolicyResponse: r"""Analyzes IAM policies to answer which identities have what accesses on which resources. @@ -2071,9 +2241,9 @@ def sample_analyze_iam_policy(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("analysis_query.scope", request.analysis_query.scope), - )), + gapic_v1.routing_header.to_grpc_metadata( + (("analysis_query.scope", request.analysis_query.scope),) + ), ) # Validate the universe domain. 
@@ -2090,13 +2260,16 @@ def sample_analyze_iam_policy(): # Done; return the response. return response - def analyze_iam_policy_longrunning(self, - request: Optional[Union[asset_service.AnalyzeIamPolicyLongrunningRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: + def analyze_iam_policy_longrunning( + self, + request: Optional[ + Union[asset_service.AnalyzeIamPolicyLongrunningRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: r"""Analyzes IAM policies asynchronously to answer which identities have what accesses on which resources, and writes the analysis results to a Google Cloud Storage or a BigQuery destination. For @@ -2175,14 +2348,16 @@ def sample_analyze_iam_policy_longrunning(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.analyze_iam_policy_longrunning] + rpc = self._transport._wrapped_methods[ + self._transport.analyze_iam_policy_longrunning + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("analysis_query.scope", request.analysis_query.scope), - )), + gapic_v1.routing_header.to_grpc_metadata( + (("analysis_query.scope", request.analysis_query.scope),) + ), ) # Validate the universe domain. @@ -2207,13 +2382,14 @@ def sample_analyze_iam_policy_longrunning(): # Done; return the response. return response - def analyze_move(self, - request: Optional[Union[asset_service.AnalyzeMoveRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> asset_service.AnalyzeMoveResponse: + def analyze_move( + self, + request: Optional[Union[asset_service.AnalyzeMoveRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.AnalyzeMoveResponse: r"""Analyze moving a resource to a specified destination without kicking off the actual move. The analysis is best effort depending on the user's permissions of @@ -2280,9 +2456,7 @@ def sample_analyze_move(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("resource", request.resource), - )), + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), ) # Validate the universe domain. @@ -2299,13 +2473,14 @@ def sample_analyze_move(): # Done; return the response. 
return response - def query_assets(self, - request: Optional[Union[asset_service.QueryAssetsRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> asset_service.QueryAssetsResponse: + def query_assets( + self, + request: Optional[Union[asset_service.QueryAssetsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.QueryAssetsResponse: r"""Issue a job that queries assets using a SQL statement compatible with `BigQuery Standard SQL `__. @@ -2378,9 +2553,7 @@ def sample_query_assets(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -2397,16 +2570,17 @@ def sample_query_assets(): # Done; return the response. return response - def create_saved_query(self, - request: Optional[Union[asset_service.CreateSavedQueryRequest, dict]] = None, - *, - parent: Optional[str] = None, - saved_query: Optional[asset_service.SavedQuery] = None, - saved_query_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> asset_service.SavedQuery: + def create_saved_query( + self, + request: Optional[Union[asset_service.CreateSavedQueryRequest, dict]] = None, + *, + parent: Optional[str] = None, + saved_query: Optional[asset_service.SavedQuery] = None, + saved_query_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.SavedQuery: r"""Creates a saved query in a parent project/folder/organization. @@ -2492,10 +2666,14 @@ def sample_create_saved_query(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [parent, saved_query, saved_query_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2517,9 +2695,7 @@ def sample_create_saved_query(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -2536,14 +2712,15 @@ def sample_create_saved_query(): # Done; return the response. 
return response - def get_saved_query(self, - request: Optional[Union[asset_service.GetSavedQueryRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> asset_service.SavedQuery: + def get_saved_query( + self, + request: Optional[Union[asset_service.GetSavedQueryRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.SavedQuery: r"""Gets details about a saved query. .. code-block:: python @@ -2604,10 +2781,14 @@ def sample_get_saved_query(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2625,9 +2806,7 @@ def sample_get_saved_query(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -2644,14 +2823,15 @@ def sample_get_saved_query(): # Done; return the response. return response - def list_saved_queries(self, - request: Optional[Union[asset_service.ListSavedQueriesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListSavedQueriesPager: + def list_saved_queries( + self, + request: Optional[Union[asset_service.ListSavedQueriesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListSavedQueriesPager: r"""Lists all saved queries in a parent project/folder/organization. @@ -2718,10 +2898,14 @@ def sample_list_saved_queries(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2739,9 +2923,7 @@ def sample_list_saved_queries(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -2769,15 +2951,16 @@ def sample_list_saved_queries(): # Done; return the response. return response - def update_saved_query(self, - request: Optional[Union[asset_service.UpdateSavedQueryRequest, dict]] = None, - *, - saved_query: Optional[asset_service.SavedQuery] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> asset_service.SavedQuery: + def update_saved_query( + self, + request: Optional[Union[asset_service.UpdateSavedQueryRequest, dict]] = None, + *, + saved_query: Optional[asset_service.SavedQuery] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.SavedQuery: r"""Updates a saved query. .. code-block:: python @@ -2846,10 +3029,14 @@ def sample_update_saved_query(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [saved_query, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2869,9 +3056,9 @@ def sample_update_saved_query(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("saved_query.name", request.saved_query.name), - )), + gapic_v1.routing_header.to_grpc_metadata( + (("saved_query.name", request.saved_query.name),) + ), ) # Validate the universe domain. @@ -2888,14 +3075,15 @@ def sample_update_saved_query(): # Done; return the response. return response - def delete_saved_query(self, - request: Optional[Union[asset_service.DeleteSavedQueryRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: + def delete_saved_query( + self, + request: Optional[Union[asset_service.DeleteSavedQueryRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: r"""Deletes a saved query. .. 
code-block:: python @@ -2947,10 +3135,14 @@ def sample_delete_saved_query(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2968,9 +3160,7 @@ def sample_delete_saved_query(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -2984,13 +3174,16 @@ def sample_delete_saved_query(): metadata=metadata, ) - def batch_get_effective_iam_policies(self, - request: Optional[Union[asset_service.BatchGetEffectiveIamPoliciesRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> asset_service.BatchGetEffectiveIamPoliciesResponse: + def batch_get_effective_iam_policies( + self, + request: Optional[ + Union[asset_service.BatchGetEffectiveIamPoliciesRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.BatchGetEffectiveIamPoliciesResponse: r"""Gets effective IAM policies for a batch of resources. .. code-block:: python @@ -3046,14 +3239,14 @@ def sample_batch_get_effective_iam_policies(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.batch_get_effective_iam_policies] + rpc = self._transport._wrapped_methods[ + self._transport.batch_get_effective_iam_policies + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("scope", request.scope), - )), + gapic_v1.routing_header.to_grpc_metadata((("scope", request.scope),)), ) # Validate the universe domain. @@ -3070,16 +3263,17 @@ def sample_batch_get_effective_iam_policies(): # Done; return the response. 
return response - def analyze_org_policies(self, - request: Optional[Union[asset_service.AnalyzeOrgPoliciesRequest, dict]] = None, - *, - scope: Optional[str] = None, - constraint: Optional[str] = None, - filter: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.AnalyzeOrgPoliciesPager: + def analyze_org_policies( + self, + request: Optional[Union[asset_service.AnalyzeOrgPoliciesRequest, dict]] = None, + *, + scope: Optional[str] = None, + constraint: Optional[str] = None, + filter: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.AnalyzeOrgPoliciesPager: r"""Analyzes organization policies under a scope. .. code-block:: python @@ -3169,10 +3363,14 @@ def sample_analyze_org_policies(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [scope, constraint, filter] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -3194,9 +3392,7 @@ def sample_analyze_org_policies(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("scope", request.scope), - )), + gapic_v1.routing_header.to_grpc_metadata((("scope", request.scope),)), ) # Validate the universe domain. @@ -3224,16 +3420,19 @@ def sample_analyze_org_policies(): # Done; return the response. return response - def analyze_org_policy_governed_containers(self, - request: Optional[Union[asset_service.AnalyzeOrgPolicyGovernedContainersRequest, dict]] = None, - *, - scope: Optional[str] = None, - constraint: Optional[str] = None, - filter: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.AnalyzeOrgPolicyGovernedContainersPager: + def analyze_org_policy_governed_containers( + self, + request: Optional[ + Union[asset_service.AnalyzeOrgPolicyGovernedContainersRequest, dict] + ] = None, + *, + scope: Optional[str] = None, + constraint: Optional[str] = None, + filter: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.AnalyzeOrgPolicyGovernedContainersPager: r"""Analyzes organization policies governed containers (projects, folders or organization) under a scope. @@ -3323,14 +3522,20 @@ def sample_analyze_org_policy_governed_containers(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
flattened_params = [scope, constraint, filter] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, asset_service.AnalyzeOrgPolicyGovernedContainersRequest): + if not isinstance( + request, asset_service.AnalyzeOrgPolicyGovernedContainersRequest + ): request = asset_service.AnalyzeOrgPolicyGovernedContainersRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -3343,14 +3548,14 @@ def sample_analyze_org_policy_governed_containers(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.analyze_org_policy_governed_containers] + rpc = self._transport._wrapped_methods[ + self._transport.analyze_org_policy_governed_containers + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("scope", request.scope), - )), + gapic_v1.routing_header.to_grpc_metadata((("scope", request.scope),)), ) # Validate the universe domain. @@ -3378,16 +3583,19 @@ def sample_analyze_org_policy_governed_containers(): # Done; return the response. return response - def analyze_org_policy_governed_assets(self, - request: Optional[Union[asset_service.AnalyzeOrgPolicyGovernedAssetsRequest, dict]] = None, - *, - scope: Optional[str] = None, - constraint: Optional[str] = None, - filter: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.AnalyzeOrgPolicyGovernedAssetsPager: + def analyze_org_policy_governed_assets( + self, + request: Optional[ + Union[asset_service.AnalyzeOrgPolicyGovernedAssetsRequest, dict] + ] = None, + *, + scope: Optional[str] = None, + constraint: Optional[str] = None, + filter: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.AnalyzeOrgPolicyGovernedAssetsPager: r"""Analyzes organization policies governed assets (Google Cloud resources or policies) under a scope. This RPC supports custom constraints and the following 10 canned constraints: @@ -3506,10 +3714,14 @@ def sample_analyze_org_policy_governed_assets(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
flattened_params = [scope, constraint, filter] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -3526,14 +3738,14 @@ def sample_analyze_org_policy_governed_assets(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.analyze_org_policy_governed_assets] + rpc = self._transport._wrapped_methods[ + self._transport.analyze_org_policy_governed_assets + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("scope", request.scope), - )), + gapic_v1.routing_header.to_grpc_metadata((("scope", request.scope),)), ) # Validate the universe domain. @@ -3612,8 +3824,7 @@ def get_operation( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -3622,7 +3833,11 @@ def get_operation( try: # Send the request. response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -3631,18 +3846,11 @@ def get_operation( raise e - - - - - - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ -__all__ = ( - "AssetServiceClient", -) +__all__ = ("AssetServiceClient",) diff --git a/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/pagers.py b/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/pagers.py index 5d14be97dd..11261a60e0 100755 --- a/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/pagers.py +++ b/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/pagers.py @@ -13,19 +13,32 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, + Union, +) + from google.api_core import gapic_v1 from google.api_core import retry as retries from google.api_core import retry_async as retries_async -from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union + try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] - OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[ + retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None + ] except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object, None] # type: ignore OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore -from google.cloud.asset_v1.types import asset_service -from google.cloud.asset_v1.types import assets +from google.cloud.asset_v1.types import asset_service, assets class ListAssetsPager: @@ -45,14 +58,17 @@ class ListAssetsPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ - def __init__(self, - method: Callable[..., asset_service.ListAssetsResponse], - request: asset_service.ListAssetsRequest, - response: asset_service.ListAssetsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., asset_service.ListAssetsResponse], + request: asset_service.ListAssetsRequest, + response: asset_service.ListAssetsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): """Instantiate the pager. Args: @@ -85,7 +101,12 @@ def pages(self) -> Iterator[asset_service.ListAssetsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __iter__(self) -> Iterator[assets.Asset]: @@ -93,7 +114,7 @@ def __iter__(self) -> Iterator[assets.Asset]: yield from page.assets def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListAssetsAsyncPager: @@ -113,14 +134,17 @@ class ListAssetsAsyncPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. 
""" - def __init__(self, - method: Callable[..., Awaitable[asset_service.ListAssetsResponse]], - request: asset_service.ListAssetsRequest, - response: asset_service.ListAssetsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., Awaitable[asset_service.ListAssetsResponse]], + request: asset_service.ListAssetsRequest, + response: asset_service.ListAssetsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): """Instantiates the pager. Args: @@ -153,8 +177,14 @@ async def pages(self) -> AsyncIterator[asset_service.ListAssetsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response + def __aiter__(self) -> AsyncIterator[assets.Asset]: async def async_generator(): async for page in self.pages: @@ -164,7 +194,7 @@ async def async_generator(): return async_generator() def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class SearchAllResourcesPager: @@ -184,14 +214,17 @@ class SearchAllResourcesPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ - def __init__(self, - method: Callable[..., asset_service.SearchAllResourcesResponse], - request: asset_service.SearchAllResourcesRequest, - response: asset_service.SearchAllResourcesResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., asset_service.SearchAllResourcesResponse], + request: asset_service.SearchAllResourcesRequest, + response: asset_service.SearchAllResourcesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): """Instantiate the pager. Args: @@ -224,7 +257,12 @@ def pages(self) -> Iterator[asset_service.SearchAllResourcesResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __iter__(self) -> Iterator[assets.ResourceSearchResult]: @@ -232,7 +270,7 @@ def __iter__(self) -> Iterator[assets.ResourceSearchResult]: yield from page.results def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class SearchAllResourcesAsyncPager: @@ -252,14 +290,17 @@ class SearchAllResourcesAsyncPager: attributes are available on the pager. 
If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ - def __init__(self, - method: Callable[..., Awaitable[asset_service.SearchAllResourcesResponse]], - request: asset_service.SearchAllResourcesRequest, - response: asset_service.SearchAllResourcesResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., Awaitable[asset_service.SearchAllResourcesResponse]], + request: asset_service.SearchAllResourcesRequest, + response: asset_service.SearchAllResourcesResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): """Instantiates the pager. Args: @@ -292,8 +333,14 @@ async def pages(self) -> AsyncIterator[asset_service.SearchAllResourcesResponse] yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response + def __aiter__(self) -> AsyncIterator[assets.ResourceSearchResult]: async def async_generator(): async for page in self.pages: @@ -303,7 +350,7 @@ async def async_generator(): return async_generator() def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class SearchAllIamPoliciesPager: @@ -323,14 +370,17 @@ class SearchAllIamPoliciesPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ - def __init__(self, - method: Callable[..., asset_service.SearchAllIamPoliciesResponse], - request: asset_service.SearchAllIamPoliciesRequest, - response: asset_service.SearchAllIamPoliciesResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., asset_service.SearchAllIamPoliciesResponse], + request: asset_service.SearchAllIamPoliciesRequest, + response: asset_service.SearchAllIamPoliciesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): """Instantiate the pager. 
Args: @@ -363,7 +413,12 @@ def pages(self) -> Iterator[asset_service.SearchAllIamPoliciesResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __iter__(self) -> Iterator[assets.IamPolicySearchResult]: @@ -371,7 +426,7 @@ def __iter__(self) -> Iterator[assets.IamPolicySearchResult]: yield from page.results def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class SearchAllIamPoliciesAsyncPager: @@ -391,14 +446,17 @@ class SearchAllIamPoliciesAsyncPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ - def __init__(self, - method: Callable[..., Awaitable[asset_service.SearchAllIamPoliciesResponse]], - request: asset_service.SearchAllIamPoliciesRequest, - response: asset_service.SearchAllIamPoliciesResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., Awaitable[asset_service.SearchAllIamPoliciesResponse]], + request: asset_service.SearchAllIamPoliciesRequest, + response: asset_service.SearchAllIamPoliciesResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): """Instantiates the pager. Args: @@ -431,8 +489,14 @@ async def pages(self) -> AsyncIterator[asset_service.SearchAllIamPoliciesRespons yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response + def __aiter__(self) -> AsyncIterator[assets.IamPolicySearchResult]: async def async_generator(): async for page in self.pages: @@ -442,7 +506,7 @@ async def async_generator(): return async_generator() def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListSavedQueriesPager: @@ -462,14 +526,17 @@ class ListSavedQueriesPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. 
""" - def __init__(self, - method: Callable[..., asset_service.ListSavedQueriesResponse], - request: asset_service.ListSavedQueriesRequest, - response: asset_service.ListSavedQueriesResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., asset_service.ListSavedQueriesResponse], + request: asset_service.ListSavedQueriesRequest, + response: asset_service.ListSavedQueriesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): """Instantiate the pager. Args: @@ -502,7 +569,12 @@ def pages(self) -> Iterator[asset_service.ListSavedQueriesResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __iter__(self) -> Iterator[asset_service.SavedQuery]: @@ -510,7 +582,7 @@ def __iter__(self) -> Iterator[asset_service.SavedQuery]: yield from page.saved_queries def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListSavedQueriesAsyncPager: @@ -530,14 +602,17 @@ class ListSavedQueriesAsyncPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ - def __init__(self, - method: Callable[..., Awaitable[asset_service.ListSavedQueriesResponse]], - request: asset_service.ListSavedQueriesRequest, - response: asset_service.ListSavedQueriesResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., Awaitable[asset_service.ListSavedQueriesResponse]], + request: asset_service.ListSavedQueriesRequest, + response: asset_service.ListSavedQueriesResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): """Instantiates the pager. 
Args: @@ -570,8 +645,14 @@ async def pages(self) -> AsyncIterator[asset_service.ListSavedQueriesResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response + def __aiter__(self) -> AsyncIterator[asset_service.SavedQuery]: async def async_generator(): async for page in self.pages: @@ -581,7 +662,7 @@ async def async_generator(): return async_generator() def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class AnalyzeOrgPoliciesPager: @@ -601,14 +682,17 @@ class AnalyzeOrgPoliciesPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ - def __init__(self, - method: Callable[..., asset_service.AnalyzeOrgPoliciesResponse], - request: asset_service.AnalyzeOrgPoliciesRequest, - response: asset_service.AnalyzeOrgPoliciesResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., asset_service.AnalyzeOrgPoliciesResponse], + request: asset_service.AnalyzeOrgPoliciesRequest, + response: asset_service.AnalyzeOrgPoliciesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): """Instantiate the pager. Args: @@ -641,15 +725,22 @@ def pages(self) -> Iterator[asset_service.AnalyzeOrgPoliciesResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response - def __iter__(self) -> Iterator[asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult]: + def __iter__( + self, + ) -> Iterator[asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult]: for page in self.pages: yield from page.org_policy_results def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class AnalyzeOrgPoliciesAsyncPager: @@ -669,14 +760,17 @@ class AnalyzeOrgPoliciesAsyncPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. 
""" - def __init__(self, - method: Callable[..., Awaitable[asset_service.AnalyzeOrgPoliciesResponse]], - request: asset_service.AnalyzeOrgPoliciesRequest, - response: asset_service.AnalyzeOrgPoliciesResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., Awaitable[asset_service.AnalyzeOrgPoliciesResponse]], + request: asset_service.AnalyzeOrgPoliciesRequest, + response: asset_service.AnalyzeOrgPoliciesResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): """Instantiates the pager. Args: @@ -709,9 +803,17 @@ async def pages(self) -> AsyncIterator[asset_service.AnalyzeOrgPoliciesResponse] yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response - def __aiter__(self) -> AsyncIterator[asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult]: + + def __aiter__( + self, + ) -> AsyncIterator[asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult]: async def async_generator(): async for page in self.pages: for response in page.org_policy_results: @@ -720,7 +822,7 @@ async def async_generator(): return async_generator() def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class AnalyzeOrgPolicyGovernedContainersPager: @@ -740,14 +842,17 @@ class AnalyzeOrgPolicyGovernedContainersPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ - def __init__(self, - method: Callable[..., asset_service.AnalyzeOrgPolicyGovernedContainersResponse], - request: asset_service.AnalyzeOrgPolicyGovernedContainersRequest, - response: asset_service.AnalyzeOrgPolicyGovernedContainersResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., asset_service.AnalyzeOrgPolicyGovernedContainersResponse], + request: asset_service.AnalyzeOrgPolicyGovernedContainersRequest, + response: asset_service.AnalyzeOrgPolicyGovernedContainersResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): """Instantiate the pager. 
Args: @@ -776,19 +881,30 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterator[asset_service.AnalyzeOrgPolicyGovernedContainersResponse]: + def pages( + self, + ) -> Iterator[asset_service.AnalyzeOrgPolicyGovernedContainersResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response - def __iter__(self) -> Iterator[asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer]: + def __iter__( + self, + ) -> Iterator[ + asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer + ]: for page in self.pages: yield from page.governed_containers def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class AnalyzeOrgPolicyGovernedContainersAsyncPager: @@ -808,14 +924,19 @@ class AnalyzeOrgPolicyGovernedContainersAsyncPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ - def __init__(self, - method: Callable[..., Awaitable[asset_service.AnalyzeOrgPolicyGovernedContainersResponse]], - request: asset_service.AnalyzeOrgPolicyGovernedContainersRequest, - response: asset_service.AnalyzeOrgPolicyGovernedContainersResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[ + ..., Awaitable[asset_service.AnalyzeOrgPolicyGovernedContainersResponse] + ], + request: asset_service.AnalyzeOrgPolicyGovernedContainersRequest, + response: asset_service.AnalyzeOrgPolicyGovernedContainersResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): """Instantiates the pager. 
Args: @@ -844,13 +965,25 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - async def pages(self) -> AsyncIterator[asset_service.AnalyzeOrgPolicyGovernedContainersResponse]: + async def pages( + self, + ) -> AsyncIterator[asset_service.AnalyzeOrgPolicyGovernedContainersResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response - def __aiter__(self) -> AsyncIterator[asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer]: + + def __aiter__( + self, + ) -> AsyncIterator[ + asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer + ]: async def async_generator(): async for page in self.pages: for response in page.governed_containers: @@ -859,7 +992,7 @@ async def async_generator(): return async_generator() def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class AnalyzeOrgPolicyGovernedAssetsPager: @@ -879,14 +1012,17 @@ class AnalyzeOrgPolicyGovernedAssetsPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ - def __init__(self, - method: Callable[..., asset_service.AnalyzeOrgPolicyGovernedAssetsResponse], - request: asset_service.AnalyzeOrgPolicyGovernedAssetsRequest, - response: asset_service.AnalyzeOrgPolicyGovernedAssetsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., asset_service.AnalyzeOrgPolicyGovernedAssetsResponse], + request: asset_service.AnalyzeOrgPolicyGovernedAssetsRequest, + response: asset_service.AnalyzeOrgPolicyGovernedAssetsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): """Instantiate the pager. Args: @@ -919,15 +1055,22 @@ def pages(self) -> Iterator[asset_service.AnalyzeOrgPolicyGovernedAssetsResponse yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response - def __iter__(self) -> Iterator[asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset]: + def __iter__( + self, + ) -> Iterator[asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset]: for page in self.pages: yield from page.governed_assets def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class AnalyzeOrgPolicyGovernedAssetsAsyncPager: @@ -947,14 +1090,19 @@ class AnalyzeOrgPolicyGovernedAssetsAsyncPager: attributes are available on the pager. 
If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ - def __init__(self, - method: Callable[..., Awaitable[asset_service.AnalyzeOrgPolicyGovernedAssetsResponse]], - request: asset_service.AnalyzeOrgPolicyGovernedAssetsRequest, - response: asset_service.AnalyzeOrgPolicyGovernedAssetsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[ + ..., Awaitable[asset_service.AnalyzeOrgPolicyGovernedAssetsResponse] + ], + request: asset_service.AnalyzeOrgPolicyGovernedAssetsRequest, + response: asset_service.AnalyzeOrgPolicyGovernedAssetsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): """Instantiates the pager. Args: @@ -983,13 +1131,25 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - async def pages(self) -> AsyncIterator[asset_service.AnalyzeOrgPolicyGovernedAssetsResponse]: + async def pages( + self, + ) -> AsyncIterator[asset_service.AnalyzeOrgPolicyGovernedAssetsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response - def __aiter__(self) -> AsyncIterator[asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset]: + + def __aiter__( + self, + ) -> AsyncIterator[ + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset + ]: async def async_generator(): async for page in self.pages: for response in page.governed_assets: @@ -998,4 +1158,4 @@ async def async_generator(): return async_generator() def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/__init__.py b/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/__init__.py index be001a49d9..2784fa6856 100755 --- a/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/__init__.py +++ b/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/__init__.py @@ -19,20 +19,18 @@ from .base import AssetServiceTransport from .grpc import AssetServiceGrpcTransport from .grpc_asyncio import AssetServiceGrpcAsyncIOTransport -from .rest import AssetServiceRestTransport -from .rest import AssetServiceRestInterceptor - +from .rest import AssetServiceRestInterceptor, AssetServiceRestTransport # Compile a registry of transports. 
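The registry assembled just below maps a short transport name ("grpc", "grpc_asyncio", "rest") to its transport class so the client can select an implementation at construction time. As a minimal, self-contained sketch of that keyed lookup, using placeholder classes rather than the real AssetService transports (illustrative only, not code from this diff):

from collections import OrderedDict
from typing import Dict, Type


class GrpcTransport:
    """Placeholder standing in for AssetServiceGrpcTransport."""


class RestTransport:
    """Placeholder standing in for AssetServiceRestTransport."""


# Same shape as the OrderedDict registry in the hunk that follows.
_registry: Dict[str, Type] = OrderedDict()
_registry["grpc"] = GrpcTransport
_registry["rest"] = RestTransport


def resolve_transport(label: str = "grpc") -> Type:
    # Hypothetical helper; real clients perform a similar keyed lookup.
    return _registry[label]


assert resolve_transport("rest") is RestTransport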
_transport_registry = OrderedDict() # type: Dict[str, Type[AssetServiceTransport]] -_transport_registry['grpc'] = AssetServiceGrpcTransport -_transport_registry['grpc_asyncio'] = AssetServiceGrpcAsyncIOTransport -_transport_registry['rest'] = AssetServiceRestTransport +_transport_registry["grpc"] = AssetServiceGrpcTransport +_transport_registry["grpc_asyncio"] = AssetServiceGrpcAsyncIOTransport +_transport_registry["rest"] = AssetServiceRestTransport __all__ = ( - 'AssetServiceTransport', - 'AssetServiceGrpcTransport', - 'AssetServiceGrpcAsyncIOTransport', - 'AssetServiceRestTransport', - 'AssetServiceRestInterceptor', + "AssetServiceTransport", + "AssetServiceGrpcTransport", + "AssetServiceGrpcAsyncIOTransport", + "AssetServiceRestTransport", + "AssetServiceRestInterceptor", ) diff --git a/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py b/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py index d1f8c36c11..9c1609c99c 100755 --- a/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py +++ b/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py @@ -16,23 +16,23 @@ import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -from google.cloud.asset_v1 import gapic_version as package_version - -import google.auth # type: ignore import google.api_core from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 +from google.api_core import gapic_v1, operations_v1 from google.api_core import retry as retries -from google.api_core import operations_v1 +import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore import google.protobuf +from google.protobuf import empty_pb2 # type: ignore +from google.cloud.asset_v1 import gapic_version as package_version from google.cloud.asset_v1.types import asset_service -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ @@ -41,24 +41,23 @@ class AssetServiceTransport(abc.ABC): """Abstract transport class for AssetService.""" - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - ) + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) - DEFAULT_HOST: str = 'cloudasset.googleapis.com' + DEFAULT_HOST: str = "cloudasset.googleapis.com" def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: 
Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: """Instantiate the transport. Args: @@ -93,31 +92,43 @@ def __init__( # If no credentials are provided, then determine the appropriate # defaults. if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) if credentials_file is not None: credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - default_scopes=self.AUTH_SCOPES, - ) + credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + default_scopes=self.AUTH_SCOPES, + ) elif credentials is None and not self._ignore_credentials: - credentials, _ = google.auth.default(scopes=scopes, quota_project_id=quota_project_id, default_scopes=self.AUTH_SCOPES) + credentials, _ = google.auth.default( + scopes=scopes, + quota_project_id=quota_project_id, + default_scopes=self.AUTH_SCOPES, + ) # Don't apply audience if the credentials file passed from user. if hasattr(credentials, "with_gdch_audience"): - credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) # If the credentials are service account credentials, then always try to use self signed JWT. - if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): credentials = credentials.with_always_use_jwt_access(True) # Save the credentials. self._credentials = credentials # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' + if ":" not in host: + host += ":443" self._host = host @property @@ -316,14 +327,14 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), - } + } def close(self): """Closes resources associated with the transport. - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
""" raise NotImplementedError() @@ -333,210 +344,248 @@ def operations_client(self): raise NotImplementedError() @property - def export_assets(self) -> Callable[ - [asset_service.ExportAssetsRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: + def export_assets( + self, + ) -> Callable[ + [asset_service.ExportAssetsRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: raise NotImplementedError() @property - def list_assets(self) -> Callable[ - [asset_service.ListAssetsRequest], - Union[ - asset_service.ListAssetsResponse, - Awaitable[asset_service.ListAssetsResponse] - ]]: + def list_assets( + self, + ) -> Callable[ + [asset_service.ListAssetsRequest], + Union[ + asset_service.ListAssetsResponse, + Awaitable[asset_service.ListAssetsResponse], + ], + ]: raise NotImplementedError() @property - def batch_get_assets_history(self) -> Callable[ - [asset_service.BatchGetAssetsHistoryRequest], - Union[ - asset_service.BatchGetAssetsHistoryResponse, - Awaitable[asset_service.BatchGetAssetsHistoryResponse] - ]]: + def batch_get_assets_history( + self, + ) -> Callable[ + [asset_service.BatchGetAssetsHistoryRequest], + Union[ + asset_service.BatchGetAssetsHistoryResponse, + Awaitable[asset_service.BatchGetAssetsHistoryResponse], + ], + ]: raise NotImplementedError() @property - def create_feed(self) -> Callable[ - [asset_service.CreateFeedRequest], - Union[ - asset_service.Feed, - Awaitable[asset_service.Feed] - ]]: + def create_feed( + self, + ) -> Callable[ + [asset_service.CreateFeedRequest], + Union[asset_service.Feed, Awaitable[asset_service.Feed]], + ]: raise NotImplementedError() @property - def get_feed(self) -> Callable[ - [asset_service.GetFeedRequest], - Union[ - asset_service.Feed, - Awaitable[asset_service.Feed] - ]]: + def get_feed( + self, + ) -> Callable[ + [asset_service.GetFeedRequest], + Union[asset_service.Feed, Awaitable[asset_service.Feed]], + ]: raise NotImplementedError() @property - def list_feeds(self) -> Callable[ - [asset_service.ListFeedsRequest], - Union[ - asset_service.ListFeedsResponse, - Awaitable[asset_service.ListFeedsResponse] - ]]: + def list_feeds( + self, + ) -> Callable[ + [asset_service.ListFeedsRequest], + Union[ + asset_service.ListFeedsResponse, Awaitable[asset_service.ListFeedsResponse] + ], + ]: raise NotImplementedError() @property - def update_feed(self) -> Callable[ - [asset_service.UpdateFeedRequest], - Union[ - asset_service.Feed, - Awaitable[asset_service.Feed] - ]]: + def update_feed( + self, + ) -> Callable[ + [asset_service.UpdateFeedRequest], + Union[asset_service.Feed, Awaitable[asset_service.Feed]], + ]: raise NotImplementedError() @property - def delete_feed(self) -> Callable[ - [asset_service.DeleteFeedRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: + def delete_feed( + self, + ) -> Callable[ + [asset_service.DeleteFeedRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: raise NotImplementedError() @property - def search_all_resources(self) -> Callable[ - [asset_service.SearchAllResourcesRequest], - Union[ - asset_service.SearchAllResourcesResponse, - Awaitable[asset_service.SearchAllResourcesResponse] - ]]: + def search_all_resources( + self, + ) -> Callable[ + [asset_service.SearchAllResourcesRequest], + Union[ + asset_service.SearchAllResourcesResponse, + Awaitable[asset_service.SearchAllResourcesResponse], + ], + ]: raise NotImplementedError() @property - def search_all_iam_policies(self) -> Callable[ - 
[asset_service.SearchAllIamPoliciesRequest], - Union[ - asset_service.SearchAllIamPoliciesResponse, - Awaitable[asset_service.SearchAllIamPoliciesResponse] - ]]: + def search_all_iam_policies( + self, + ) -> Callable[ + [asset_service.SearchAllIamPoliciesRequest], + Union[ + asset_service.SearchAllIamPoliciesResponse, + Awaitable[asset_service.SearchAllIamPoliciesResponse], + ], + ]: raise NotImplementedError() @property - def analyze_iam_policy(self) -> Callable[ - [asset_service.AnalyzeIamPolicyRequest], - Union[ - asset_service.AnalyzeIamPolicyResponse, - Awaitable[asset_service.AnalyzeIamPolicyResponse] - ]]: + def analyze_iam_policy( + self, + ) -> Callable[ + [asset_service.AnalyzeIamPolicyRequest], + Union[ + asset_service.AnalyzeIamPolicyResponse, + Awaitable[asset_service.AnalyzeIamPolicyResponse], + ], + ]: raise NotImplementedError() @property - def analyze_iam_policy_longrunning(self) -> Callable[ - [asset_service.AnalyzeIamPolicyLongrunningRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: + def analyze_iam_policy_longrunning( + self, + ) -> Callable[ + [asset_service.AnalyzeIamPolicyLongrunningRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: raise NotImplementedError() @property - def analyze_move(self) -> Callable[ - [asset_service.AnalyzeMoveRequest], - Union[ - asset_service.AnalyzeMoveResponse, - Awaitable[asset_service.AnalyzeMoveResponse] - ]]: + def analyze_move( + self, + ) -> Callable[ + [asset_service.AnalyzeMoveRequest], + Union[ + asset_service.AnalyzeMoveResponse, + Awaitable[asset_service.AnalyzeMoveResponse], + ], + ]: raise NotImplementedError() @property - def query_assets(self) -> Callable[ - [asset_service.QueryAssetsRequest], - Union[ - asset_service.QueryAssetsResponse, - Awaitable[asset_service.QueryAssetsResponse] - ]]: + def query_assets( + self, + ) -> Callable[ + [asset_service.QueryAssetsRequest], + Union[ + asset_service.QueryAssetsResponse, + Awaitable[asset_service.QueryAssetsResponse], + ], + ]: raise NotImplementedError() @property - def create_saved_query(self) -> Callable[ - [asset_service.CreateSavedQueryRequest], - Union[ - asset_service.SavedQuery, - Awaitable[asset_service.SavedQuery] - ]]: + def create_saved_query( + self, + ) -> Callable[ + [asset_service.CreateSavedQueryRequest], + Union[asset_service.SavedQuery, Awaitable[asset_service.SavedQuery]], + ]: raise NotImplementedError() @property - def get_saved_query(self) -> Callable[ - [asset_service.GetSavedQueryRequest], - Union[ - asset_service.SavedQuery, - Awaitable[asset_service.SavedQuery] - ]]: + def get_saved_query( + self, + ) -> Callable[ + [asset_service.GetSavedQueryRequest], + Union[asset_service.SavedQuery, Awaitable[asset_service.SavedQuery]], + ]: raise NotImplementedError() @property - def list_saved_queries(self) -> Callable[ - [asset_service.ListSavedQueriesRequest], - Union[ - asset_service.ListSavedQueriesResponse, - Awaitable[asset_service.ListSavedQueriesResponse] - ]]: + def list_saved_queries( + self, + ) -> Callable[ + [asset_service.ListSavedQueriesRequest], + Union[ + asset_service.ListSavedQueriesResponse, + Awaitable[asset_service.ListSavedQueriesResponse], + ], + ]: raise NotImplementedError() @property - def update_saved_query(self) -> Callable[ - [asset_service.UpdateSavedQueryRequest], - Union[ - asset_service.SavedQuery, - Awaitable[asset_service.SavedQuery] - ]]: + def update_saved_query( + self, + ) -> Callable[ + [asset_service.UpdateSavedQueryRequest], + 
Union[asset_service.SavedQuery, Awaitable[asset_service.SavedQuery]], + ]: raise NotImplementedError() @property - def delete_saved_query(self) -> Callable[ - [asset_service.DeleteSavedQueryRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: + def delete_saved_query( + self, + ) -> Callable[ + [asset_service.DeleteSavedQueryRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: raise NotImplementedError() @property - def batch_get_effective_iam_policies(self) -> Callable[ - [asset_service.BatchGetEffectiveIamPoliciesRequest], - Union[ - asset_service.BatchGetEffectiveIamPoliciesResponse, - Awaitable[asset_service.BatchGetEffectiveIamPoliciesResponse] - ]]: + def batch_get_effective_iam_policies( + self, + ) -> Callable[ + [asset_service.BatchGetEffectiveIamPoliciesRequest], + Union[ + asset_service.BatchGetEffectiveIamPoliciesResponse, + Awaitable[asset_service.BatchGetEffectiveIamPoliciesResponse], + ], + ]: raise NotImplementedError() @property - def analyze_org_policies(self) -> Callable[ - [asset_service.AnalyzeOrgPoliciesRequest], - Union[ - asset_service.AnalyzeOrgPoliciesResponse, - Awaitable[asset_service.AnalyzeOrgPoliciesResponse] - ]]: + def analyze_org_policies( + self, + ) -> Callable[ + [asset_service.AnalyzeOrgPoliciesRequest], + Union[ + asset_service.AnalyzeOrgPoliciesResponse, + Awaitable[asset_service.AnalyzeOrgPoliciesResponse], + ], + ]: raise NotImplementedError() @property - def analyze_org_policy_governed_containers(self) -> Callable[ - [asset_service.AnalyzeOrgPolicyGovernedContainersRequest], - Union[ - asset_service.AnalyzeOrgPolicyGovernedContainersResponse, - Awaitable[asset_service.AnalyzeOrgPolicyGovernedContainersResponse] - ]]: + def analyze_org_policy_governed_containers( + self, + ) -> Callable[ + [asset_service.AnalyzeOrgPolicyGovernedContainersRequest], + Union[ + asset_service.AnalyzeOrgPolicyGovernedContainersResponse, + Awaitable[asset_service.AnalyzeOrgPolicyGovernedContainersResponse], + ], + ]: raise NotImplementedError() @property - def analyze_org_policy_governed_assets(self) -> Callable[ - [asset_service.AnalyzeOrgPolicyGovernedAssetsRequest], - Union[ - asset_service.AnalyzeOrgPolicyGovernedAssetsResponse, - Awaitable[asset_service.AnalyzeOrgPolicyGovernedAssetsResponse] - ]]: + def analyze_org_policy_governed_assets( + self, + ) -> Callable[ + [asset_service.AnalyzeOrgPolicyGovernedAssetsRequest], + Union[ + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse, + Awaitable[asset_service.AnalyzeOrgPolicyGovernedAssetsResponse], + ], + ]: raise NotImplementedError() @property @@ -553,6 +602,4 @@ def kind(self) -> str: raise NotImplementedError() -__all__ = ( - 'AssetServiceTransport', -) +__all__ = ("AssetServiceTransport",) diff --git a/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py b/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py index 1edf735194..7dea734c1f 100755 --- a/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py +++ b/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py @@ -16,28 +16,27 @@ import json import logging as std_logging import pickle -import warnings from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings -from google.api_core import grpc_helpers -from google.api_core import operations_v1 -from google.api_core import gapic_v1 -import google.auth # type: ignore +from 
google.api_core import gapic_v1, grpc_helpers, operations_v1 +import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore from google.protobuf.json_format import MessageToJson import google.protobuf.message - import grpc # type: ignore import proto # type: ignore from google.cloud.asset_v1.types import asset_service -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from .base import AssetServiceTransport, DEFAULT_CLIENT_INFO + +from .base import DEFAULT_CLIENT_INFO, AssetServiceTransport try: from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False @@ -47,7 +46,9 @@ class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER def intercept_unary_unary(self, continuation, client_call_details, request): - logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ) if logging_enabled: # pragma: NO COVER request_metadata = client_call_details.metadata if isinstance(request, proto.Message): @@ -68,7 +69,7 @@ def intercept_unary_unary(self, continuation, client_call_details, request): } _LOGGER.debug( f"Sending request for {client_call_details.method}", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": str(client_call_details.method), "request": grpc_request, @@ -79,7 +80,11 @@ def intercept_unary_unary(self, continuation, client_call_details, request): if logging_enabled: # pragma: NO COVER response_metadata = response.trailing_metadata() # Convert gRPC metadata `` to list of tuples - metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None + metadata = ( + dict([(k, str(v)) for k, v in response_metadata]) + if response_metadata + else None + ) result = response.result() if isinstance(result, proto.Message): response_payload = type(result).to_json(result) @@ -94,7 +99,7 @@ def intercept_unary_unary(self, continuation, client_call_details, request): } _LOGGER.debug( f"Received response for {client_call_details.method}.", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": client_call_details.method, "response": grpc_response, @@ -116,23 +121,26 @@ class AssetServiceGrpcTransport(AssetServiceTransport): It sends protocol buffers over the wire using gRPC (which is built on top of HTTP/2); the ``grpcio`` package must be installed. 
""" + _stubs: Dict[str, Callable] - def __init__(self, *, - host: str = 'cloudasset.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: + def __init__( + self, + *, + host: str = "cloudasset.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: """Instantiate the transport. Args: @@ -256,19 +264,23 @@ def __init__(self, *, ) self._interceptor = _LoggingClientInterceptor() - self._logged_channel = grpc.intercept_channel(self._grpc_channel, self._interceptor) + self._logged_channel = grpc.intercept_channel( + self._grpc_channel, self._interceptor + ) # Wrap messages. This must be done after self._logged_channel exists self._prep_wrapped_messages(client_info) @classmethod - def create_channel(cls, - host: str = 'cloudasset.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: + def create_channel( + cls, + host: str = "cloudasset.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: """Create and return a gRPC channel object. Args: host (Optional[str]): The host for the channel to use. @@ -304,13 +316,12 @@ def create_channel(cls, default_scopes=cls.AUTH_SCOPES, scopes=scopes, default_host=cls.DEFAULT_HOST, - **kwargs + **kwargs, ) @property def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. - """ + """Return the channel designed to connect to this service.""" return self._grpc_channel @property @@ -330,9 +341,9 @@ def operations_client(self) -> operations_v1.OperationsClient: return self._operations_client @property - def export_assets(self) -> Callable[ - [asset_service.ExportAssetsRequest], - operations_pb2.Operation]: + def export_assets( + self, + ) -> Callable[[asset_service.ExportAssetsRequest], operations_pb2.Operation]: r"""Return a callable for the export assets method over gRPC. 
Exports assets with time and resource types to a given Cloud @@ -359,18 +370,18 @@ def export_assets(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'export_assets' not in self._stubs: - self._stubs['export_assets'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/ExportAssets', + if "export_assets" not in self._stubs: + self._stubs["export_assets"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/ExportAssets", request_serializer=asset_service.ExportAssetsRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['export_assets'] + return self._stubs["export_assets"] @property - def list_assets(self) -> Callable[ - [asset_service.ListAssetsRequest], - asset_service.ListAssetsResponse]: + def list_assets( + self, + ) -> Callable[[asset_service.ListAssetsRequest], asset_service.ListAssetsResponse]: r"""Return a callable for the list assets method over gRPC. Lists assets with time and resource types and returns @@ -386,18 +397,21 @@ def list_assets(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'list_assets' not in self._stubs: - self._stubs['list_assets'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/ListAssets', + if "list_assets" not in self._stubs: + self._stubs["list_assets"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/ListAssets", request_serializer=asset_service.ListAssetsRequest.serialize, response_deserializer=asset_service.ListAssetsResponse.deserialize, ) - return self._stubs['list_assets'] + return self._stubs["list_assets"] @property - def batch_get_assets_history(self) -> Callable[ - [asset_service.BatchGetAssetsHistoryRequest], - asset_service.BatchGetAssetsHistoryResponse]: + def batch_get_assets_history( + self, + ) -> Callable[ + [asset_service.BatchGetAssetsHistoryRequest], + asset_service.BatchGetAssetsHistoryResponse, + ]: r"""Return a callable for the batch get assets history method over gRPC. Batch gets the update history of assets that overlap a time @@ -418,18 +432,18 @@ def batch_get_assets_history(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'batch_get_assets_history' not in self._stubs: - self._stubs['batch_get_assets_history'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/BatchGetAssetsHistory', + if "batch_get_assets_history" not in self._stubs: + self._stubs["batch_get_assets_history"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/BatchGetAssetsHistory", request_serializer=asset_service.BatchGetAssetsHistoryRequest.serialize, response_deserializer=asset_service.BatchGetAssetsHistoryResponse.deserialize, ) - return self._stubs['batch_get_assets_history'] + return self._stubs["batch_get_assets_history"] @property - def create_feed(self) -> Callable[ - [asset_service.CreateFeedRequest], - asset_service.Feed]: + def create_feed( + self, + ) -> Callable[[asset_service.CreateFeedRequest], asset_service.Feed]: r"""Return a callable for the create feed method over gRPC. Creates a feed in a parent @@ -446,18 +460,16 @@ def create_feed(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'create_feed' not in self._stubs: - self._stubs['create_feed'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/CreateFeed', + if "create_feed" not in self._stubs: + self._stubs["create_feed"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/CreateFeed", request_serializer=asset_service.CreateFeedRequest.serialize, response_deserializer=asset_service.Feed.deserialize, ) - return self._stubs['create_feed'] + return self._stubs["create_feed"] @property - def get_feed(self) -> Callable[ - [asset_service.GetFeedRequest], - asset_service.Feed]: + def get_feed(self) -> Callable[[asset_service.GetFeedRequest], asset_service.Feed]: r"""Return a callable for the get feed method over gRPC. Gets details about an asset feed. @@ -472,18 +484,18 @@ def get_feed(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'get_feed' not in self._stubs: - self._stubs['get_feed'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/GetFeed', + if "get_feed" not in self._stubs: + self._stubs["get_feed"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/GetFeed", request_serializer=asset_service.GetFeedRequest.serialize, response_deserializer=asset_service.Feed.deserialize, ) - return self._stubs['get_feed'] + return self._stubs["get_feed"] @property - def list_feeds(self) -> Callable[ - [asset_service.ListFeedsRequest], - asset_service.ListFeedsResponse]: + def list_feeds( + self, + ) -> Callable[[asset_service.ListFeedsRequest], asset_service.ListFeedsResponse]: r"""Return a callable for the list feeds method over gRPC. Lists all asset feeds in a parent @@ -499,18 +511,18 @@ def list_feeds(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'list_feeds' not in self._stubs: - self._stubs['list_feeds'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/ListFeeds', + if "list_feeds" not in self._stubs: + self._stubs["list_feeds"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/ListFeeds", request_serializer=asset_service.ListFeedsRequest.serialize, response_deserializer=asset_service.ListFeedsResponse.deserialize, ) - return self._stubs['list_feeds'] + return self._stubs["list_feeds"] @property - def update_feed(self) -> Callable[ - [asset_service.UpdateFeedRequest], - asset_service.Feed]: + def update_feed( + self, + ) -> Callable[[asset_service.UpdateFeedRequest], asset_service.Feed]: r"""Return a callable for the update feed method over gRPC. Updates an asset feed configuration. @@ -525,18 +537,18 @@ def update_feed(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'update_feed' not in self._stubs: - self._stubs['update_feed'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/UpdateFeed', + if "update_feed" not in self._stubs: + self._stubs["update_feed"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/UpdateFeed", request_serializer=asset_service.UpdateFeedRequest.serialize, response_deserializer=asset_service.Feed.deserialize, ) - return self._stubs['update_feed'] + return self._stubs["update_feed"] @property - def delete_feed(self) -> Callable[ - [asset_service.DeleteFeedRequest], - empty_pb2.Empty]: + def delete_feed( + self, + ) -> Callable[[asset_service.DeleteFeedRequest], empty_pb2.Empty]: r"""Return a callable for the delete feed method over gRPC. Deletes an asset feed. @@ -551,18 +563,21 @@ def delete_feed(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'delete_feed' not in self._stubs: - self._stubs['delete_feed'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/DeleteFeed', + if "delete_feed" not in self._stubs: + self._stubs["delete_feed"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/DeleteFeed", request_serializer=asset_service.DeleteFeedRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, ) - return self._stubs['delete_feed'] + return self._stubs["delete_feed"] @property - def search_all_resources(self) -> Callable[ - [asset_service.SearchAllResourcesRequest], - asset_service.SearchAllResourcesResponse]: + def search_all_resources( + self, + ) -> Callable[ + [asset_service.SearchAllResourcesRequest], + asset_service.SearchAllResourcesResponse, + ]: r"""Return a callable for the search all resources method over gRPC. Searches all Google Cloud resources within the specified scope, @@ -580,18 +595,21 @@ def search_all_resources(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'search_all_resources' not in self._stubs: - self._stubs['search_all_resources'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/SearchAllResources', + if "search_all_resources" not in self._stubs: + self._stubs["search_all_resources"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/SearchAllResources", request_serializer=asset_service.SearchAllResourcesRequest.serialize, response_deserializer=asset_service.SearchAllResourcesResponse.deserialize, ) - return self._stubs['search_all_resources'] + return self._stubs["search_all_resources"] @property - def search_all_iam_policies(self) -> Callable[ - [asset_service.SearchAllIamPoliciesRequest], - asset_service.SearchAllIamPoliciesResponse]: + def search_all_iam_policies( + self, + ) -> Callable[ + [asset_service.SearchAllIamPoliciesRequest], + asset_service.SearchAllIamPoliciesResponse, + ]: r"""Return a callable for the search all iam policies method over gRPC. Searches all IAM policies within the specified scope, such as a @@ -609,18 +627,20 @@ def search_all_iam_policies(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'search_all_iam_policies' not in self._stubs: - self._stubs['search_all_iam_policies'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/SearchAllIamPolicies', + if "search_all_iam_policies" not in self._stubs: + self._stubs["search_all_iam_policies"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/SearchAllIamPolicies", request_serializer=asset_service.SearchAllIamPoliciesRequest.serialize, response_deserializer=asset_service.SearchAllIamPoliciesResponse.deserialize, ) - return self._stubs['search_all_iam_policies'] + return self._stubs["search_all_iam_policies"] @property - def analyze_iam_policy(self) -> Callable[ - [asset_service.AnalyzeIamPolicyRequest], - asset_service.AnalyzeIamPolicyResponse]: + def analyze_iam_policy( + self, + ) -> Callable[ + [asset_service.AnalyzeIamPolicyRequest], asset_service.AnalyzeIamPolicyResponse + ]: r"""Return a callable for the analyze iam policy method over gRPC. Analyzes IAM policies to answer which identities have @@ -636,18 +656,20 @@ def analyze_iam_policy(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'analyze_iam_policy' not in self._stubs: - self._stubs['analyze_iam_policy'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/AnalyzeIamPolicy', + if "analyze_iam_policy" not in self._stubs: + self._stubs["analyze_iam_policy"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/AnalyzeIamPolicy", request_serializer=asset_service.AnalyzeIamPolicyRequest.serialize, response_deserializer=asset_service.AnalyzeIamPolicyResponse.deserialize, ) - return self._stubs['analyze_iam_policy'] + return self._stubs["analyze_iam_policy"] @property - def analyze_iam_policy_longrunning(self) -> Callable[ - [asset_service.AnalyzeIamPolicyLongrunningRequest], - operations_pb2.Operation]: + def analyze_iam_policy_longrunning( + self, + ) -> Callable[ + [asset_service.AnalyzeIamPolicyLongrunningRequest], operations_pb2.Operation + ]: r"""Return a callable for the analyze iam policy longrunning method over gRPC. Analyzes IAM policies asynchronously to answer which identities @@ -673,18 +695,22 @@ def analyze_iam_policy_longrunning(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'analyze_iam_policy_longrunning' not in self._stubs: - self._stubs['analyze_iam_policy_longrunning'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/AnalyzeIamPolicyLongrunning', + if "analyze_iam_policy_longrunning" not in self._stubs: + self._stubs[ + "analyze_iam_policy_longrunning" + ] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/AnalyzeIamPolicyLongrunning", request_serializer=asset_service.AnalyzeIamPolicyLongrunningRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['analyze_iam_policy_longrunning'] + return self._stubs["analyze_iam_policy_longrunning"] @property - def analyze_move(self) -> Callable[ - [asset_service.AnalyzeMoveRequest], - asset_service.AnalyzeMoveResponse]: + def analyze_move( + self, + ) -> Callable[ + [asset_service.AnalyzeMoveRequest], asset_service.AnalyzeMoveResponse + ]: r"""Return a callable for the analyze move method over gRPC. Analyze moving a resource to a specified destination @@ -705,18 +731,20 @@ def analyze_move(self) -> Callable[ # the request. 
# gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'analyze_move' not in self._stubs: - self._stubs['analyze_move'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/AnalyzeMove', + if "analyze_move" not in self._stubs: + self._stubs["analyze_move"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/AnalyzeMove", request_serializer=asset_service.AnalyzeMoveRequest.serialize, response_deserializer=asset_service.AnalyzeMoveResponse.deserialize, ) - return self._stubs['analyze_move'] + return self._stubs["analyze_move"] @property - def query_assets(self) -> Callable[ - [asset_service.QueryAssetsRequest], - asset_service.QueryAssetsResponse]: + def query_assets( + self, + ) -> Callable[ + [asset_service.QueryAssetsRequest], asset_service.QueryAssetsResponse + ]: r"""Return a callable for the query assets method over gRPC. Issue a job that queries assets using a SQL statement compatible @@ -746,18 +774,18 @@ def query_assets(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'query_assets' not in self._stubs: - self._stubs['query_assets'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/QueryAssets', + if "query_assets" not in self._stubs: + self._stubs["query_assets"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/QueryAssets", request_serializer=asset_service.QueryAssetsRequest.serialize, response_deserializer=asset_service.QueryAssetsResponse.deserialize, ) - return self._stubs['query_assets'] + return self._stubs["query_assets"] @property - def create_saved_query(self) -> Callable[ - [asset_service.CreateSavedQueryRequest], - asset_service.SavedQuery]: + def create_saved_query( + self, + ) -> Callable[[asset_service.CreateSavedQueryRequest], asset_service.SavedQuery]: r"""Return a callable for the create saved query method over gRPC. Creates a saved query in a parent @@ -773,18 +801,18 @@ def create_saved_query(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'create_saved_query' not in self._stubs: - self._stubs['create_saved_query'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/CreateSavedQuery', + if "create_saved_query" not in self._stubs: + self._stubs["create_saved_query"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/CreateSavedQuery", request_serializer=asset_service.CreateSavedQueryRequest.serialize, response_deserializer=asset_service.SavedQuery.deserialize, ) - return self._stubs['create_saved_query'] + return self._stubs["create_saved_query"] @property - def get_saved_query(self) -> Callable[ - [asset_service.GetSavedQueryRequest], - asset_service.SavedQuery]: + def get_saved_query( + self, + ) -> Callable[[asset_service.GetSavedQueryRequest], asset_service.SavedQuery]: r"""Return a callable for the get saved query method over gRPC. Gets details about a saved query. @@ -799,18 +827,20 @@ def get_saved_query(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'get_saved_query' not in self._stubs: - self._stubs['get_saved_query'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/GetSavedQuery', + if "get_saved_query" not in self._stubs: + self._stubs["get_saved_query"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/GetSavedQuery", request_serializer=asset_service.GetSavedQueryRequest.serialize, response_deserializer=asset_service.SavedQuery.deserialize, ) - return self._stubs['get_saved_query'] + return self._stubs["get_saved_query"] @property - def list_saved_queries(self) -> Callable[ - [asset_service.ListSavedQueriesRequest], - asset_service.ListSavedQueriesResponse]: + def list_saved_queries( + self, + ) -> Callable[ + [asset_service.ListSavedQueriesRequest], asset_service.ListSavedQueriesResponse + ]: r"""Return a callable for the list saved queries method over gRPC. Lists all saved queries in a parent @@ -826,18 +856,18 @@ def list_saved_queries(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'list_saved_queries' not in self._stubs: - self._stubs['list_saved_queries'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/ListSavedQueries', + if "list_saved_queries" not in self._stubs: + self._stubs["list_saved_queries"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/ListSavedQueries", request_serializer=asset_service.ListSavedQueriesRequest.serialize, response_deserializer=asset_service.ListSavedQueriesResponse.deserialize, ) - return self._stubs['list_saved_queries'] + return self._stubs["list_saved_queries"] @property - def update_saved_query(self) -> Callable[ - [asset_service.UpdateSavedQueryRequest], - asset_service.SavedQuery]: + def update_saved_query( + self, + ) -> Callable[[asset_service.UpdateSavedQueryRequest], asset_service.SavedQuery]: r"""Return a callable for the update saved query method over gRPC. Updates a saved query. @@ -852,18 +882,18 @@ def update_saved_query(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'update_saved_query' not in self._stubs: - self._stubs['update_saved_query'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/UpdateSavedQuery', + if "update_saved_query" not in self._stubs: + self._stubs["update_saved_query"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/UpdateSavedQuery", request_serializer=asset_service.UpdateSavedQueryRequest.serialize, response_deserializer=asset_service.SavedQuery.deserialize, ) - return self._stubs['update_saved_query'] + return self._stubs["update_saved_query"] @property - def delete_saved_query(self) -> Callable[ - [asset_service.DeleteSavedQueryRequest], - empty_pb2.Empty]: + def delete_saved_query( + self, + ) -> Callable[[asset_service.DeleteSavedQueryRequest], empty_pb2.Empty]: r"""Return a callable for the delete saved query method over gRPC. Deletes a saved query. @@ -878,18 +908,21 @@ def delete_saved_query(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'delete_saved_query' not in self._stubs: - self._stubs['delete_saved_query'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/DeleteSavedQuery', + if "delete_saved_query" not in self._stubs: + self._stubs["delete_saved_query"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/DeleteSavedQuery", request_serializer=asset_service.DeleteSavedQueryRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, ) - return self._stubs['delete_saved_query'] + return self._stubs["delete_saved_query"] @property - def batch_get_effective_iam_policies(self) -> Callable[ - [asset_service.BatchGetEffectiveIamPoliciesRequest], - asset_service.BatchGetEffectiveIamPoliciesResponse]: + def batch_get_effective_iam_policies( + self, + ) -> Callable[ + [asset_service.BatchGetEffectiveIamPoliciesRequest], + asset_service.BatchGetEffectiveIamPoliciesResponse, + ]: r"""Return a callable for the batch get effective iam policies method over gRPC. @@ -905,18 +938,23 @@ def batch_get_effective_iam_policies(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'batch_get_effective_iam_policies' not in self._stubs: - self._stubs['batch_get_effective_iam_policies'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/BatchGetEffectiveIamPolicies', + if "batch_get_effective_iam_policies" not in self._stubs: + self._stubs[ + "batch_get_effective_iam_policies" + ] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/BatchGetEffectiveIamPolicies", request_serializer=asset_service.BatchGetEffectiveIamPoliciesRequest.serialize, response_deserializer=asset_service.BatchGetEffectiveIamPoliciesResponse.deserialize, ) - return self._stubs['batch_get_effective_iam_policies'] + return self._stubs["batch_get_effective_iam_policies"] @property - def analyze_org_policies(self) -> Callable[ - [asset_service.AnalyzeOrgPoliciesRequest], - asset_service.AnalyzeOrgPoliciesResponse]: + def analyze_org_policies( + self, + ) -> Callable[ + [asset_service.AnalyzeOrgPoliciesRequest], + asset_service.AnalyzeOrgPoliciesResponse, + ]: r"""Return a callable for the analyze org policies method over gRPC. Analyzes organization policies under a scope. @@ -931,18 +969,21 @@ def analyze_org_policies(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'analyze_org_policies' not in self._stubs: - self._stubs['analyze_org_policies'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/AnalyzeOrgPolicies', + if "analyze_org_policies" not in self._stubs: + self._stubs["analyze_org_policies"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/AnalyzeOrgPolicies", request_serializer=asset_service.AnalyzeOrgPoliciesRequest.serialize, response_deserializer=asset_service.AnalyzeOrgPoliciesResponse.deserialize, ) - return self._stubs['analyze_org_policies'] + return self._stubs["analyze_org_policies"] @property - def analyze_org_policy_governed_containers(self) -> Callable[ - [asset_service.AnalyzeOrgPolicyGovernedContainersRequest], - asset_service.AnalyzeOrgPolicyGovernedContainersResponse]: + def analyze_org_policy_governed_containers( + self, + ) -> Callable[ + [asset_service.AnalyzeOrgPolicyGovernedContainersRequest], + asset_service.AnalyzeOrgPolicyGovernedContainersResponse, + ]: r"""Return a callable for the analyze org policy governed containers method over gRPC. @@ -959,18 +1000,23 @@ def analyze_org_policy_governed_containers(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'analyze_org_policy_governed_containers' not in self._stubs: - self._stubs['analyze_org_policy_governed_containers'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/AnalyzeOrgPolicyGovernedContainers', + if "analyze_org_policy_governed_containers" not in self._stubs: + self._stubs[ + "analyze_org_policy_governed_containers" + ] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/AnalyzeOrgPolicyGovernedContainers", request_serializer=asset_service.AnalyzeOrgPolicyGovernedContainersRequest.serialize, response_deserializer=asset_service.AnalyzeOrgPolicyGovernedContainersResponse.deserialize, ) - return self._stubs['analyze_org_policy_governed_containers'] + return self._stubs["analyze_org_policy_governed_containers"] @property - def analyze_org_policy_governed_assets(self) -> Callable[ - [asset_service.AnalyzeOrgPolicyGovernedAssetsRequest], - asset_service.AnalyzeOrgPolicyGovernedAssetsResponse]: + def analyze_org_policy_governed_assets( + self, + ) -> Callable[ + [asset_service.AnalyzeOrgPolicyGovernedAssetsRequest], + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse, + ]: r"""Return a callable for the analyze org policy governed assets method over gRPC. @@ -1004,13 +1050,15 @@ def analyze_org_policy_governed_assets(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'analyze_org_policy_governed_assets' not in self._stubs: - self._stubs['analyze_org_policy_governed_assets'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/AnalyzeOrgPolicyGovernedAssets', + if "analyze_org_policy_governed_assets" not in self._stubs: + self._stubs[ + "analyze_org_policy_governed_assets" + ] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/AnalyzeOrgPolicyGovernedAssets", request_serializer=asset_service.AnalyzeOrgPolicyGovernedAssetsRequest.serialize, response_deserializer=asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.deserialize, ) - return self._stubs['analyze_org_policy_governed_assets'] + return self._stubs["analyze_org_policy_governed_assets"] def close(self): self._logged_channel.close() @@ -1019,8 +1067,7 @@ def close(self): def get_operation( self, ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ + r"""Return a callable for the get_operation method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -1038,6 +1085,4 @@ def kind(self) -> str: return "grpc" -__all__ = ( - 'AssetServiceGrpcTransport', -) +__all__ = ("AssetServiceGrpcTransport",) diff --git a/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py b/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py index a2b4aeb11a..9b20dc81a9 100755 --- a/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py +++ b/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py @@ -15,33 +15,32 @@ # import inspect import json -import pickle import logging as std_logging -import warnings +import pickle from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers_async from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, grpc_helpers_async, operations_v1 from google.api_core import retry_async as retries -from google.api_core import operations_v1 -from google.auth import credentials as ga_credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore from google.protobuf.json_format import MessageToJson import google.protobuf.message - -import grpc # type: ignore -import proto # type: ignore +import grpc # type: ignore from grpc.experimental import aio # type: ignore +import proto # type: ignore from google.cloud.asset_v1.types import asset_service -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from .base import AssetServiceTransport, DEFAULT_CLIENT_INFO + +from .base import DEFAULT_CLIENT_INFO, AssetServiceTransport from .grpc import AssetServiceGrpcTransport try: from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False @@ -49,9 +48,13 @@ _LOGGER = std_logging.getLogger(__name__) -class 
_LoggingClientAIOInterceptor(grpc.aio.UnaryUnaryClientInterceptor): # pragma: NO COVER +class _LoggingClientAIOInterceptor( + grpc.aio.UnaryUnaryClientInterceptor +): # pragma: NO COVER async def intercept_unary_unary(self, continuation, client_call_details, request): - logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ) if logging_enabled: # pragma: NO COVER request_metadata = client_call_details.metadata if isinstance(request, proto.Message): @@ -72,7 +75,7 @@ async def intercept_unary_unary(self, continuation, client_call_details, request } _LOGGER.debug( f"Sending request for {client_call_details.method}", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": str(client_call_details.method), "request": grpc_request, @@ -83,7 +86,11 @@ async def intercept_unary_unary(self, continuation, client_call_details, request if logging_enabled: # pragma: NO COVER response_metadata = await response.trailing_metadata() # Convert gRPC metadata `` to list of tuples - metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None + metadata = ( + dict([(k, str(v)) for k, v in response_metadata]) + if response_metadata + else None + ) result = await response if isinstance(result, proto.Message): response_payload = type(result).to_json(result) @@ -98,7 +105,7 @@ async def intercept_unary_unary(self, continuation, client_call_details, request } _LOGGER.debug( f"Received response to rpc {client_call_details.method}.", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": str(client_call_details.method), "response": grpc_response, @@ -125,13 +132,15 @@ class AssetServiceGrpcAsyncIOTransport(AssetServiceTransport): _stubs: Dict[str, Callable] = {} @classmethod - def create_channel(cls, - host: str = 'cloudasset.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> aio.Channel: + def create_channel( + cls, + host: str = "cloudasset.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: """Create and return a gRPC AsyncIO channel object. Args: host (Optional[str]): The host for the channel to use. 
@@ -162,24 +171,26 @@ def create_channel(cls, default_scopes=cls.AUTH_SCOPES, scopes=scopes, default_host=cls.DEFAULT_HOST, - **kwargs + **kwargs, ) - def __init__(self, *, - host: str = 'cloudasset.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: + def __init__( + self, + *, + host: str = "cloudasset.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: """Instantiate the transport. Args: @@ -305,7 +316,9 @@ def __init__(self, *, self._interceptor = _LoggingClientAIOInterceptor() self._grpc_channel._unary_unary_interceptors.append(self._interceptor) self._logged_channel = self._grpc_channel - self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters + self._wrap_with_kind = ( + "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters + ) # Wrap messages. This must be done after self._logged_channel exists self._prep_wrapped_messages(client_info) @@ -336,9 +349,11 @@ def operations_client(self) -> operations_v1.OperationsAsyncClient: return self._operations_client @property - def export_assets(self) -> Callable[ - [asset_service.ExportAssetsRequest], - Awaitable[operations_pb2.Operation]]: + def export_assets( + self, + ) -> Callable[ + [asset_service.ExportAssetsRequest], Awaitable[operations_pb2.Operation] + ]: r"""Return a callable for the export assets method over gRPC. Exports assets with time and resource types to a given Cloud @@ -365,18 +380,20 @@ def export_assets(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'export_assets' not in self._stubs: - self._stubs['export_assets'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/ExportAssets', + if "export_assets" not in self._stubs: + self._stubs["export_assets"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/ExportAssets", request_serializer=asset_service.ExportAssetsRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['export_assets'] + return self._stubs["export_assets"] @property - def list_assets(self) -> Callable[ - [asset_service.ListAssetsRequest], - Awaitable[asset_service.ListAssetsResponse]]: + def list_assets( + self, + ) -> Callable[ + [asset_service.ListAssetsRequest], Awaitable[asset_service.ListAssetsResponse] + ]: r"""Return a callable for the list assets method over gRPC. Lists assets with time and resource types and returns @@ -392,18 +409,21 @@ def list_assets(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'list_assets' not in self._stubs: - self._stubs['list_assets'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/ListAssets', + if "list_assets" not in self._stubs: + self._stubs["list_assets"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/ListAssets", request_serializer=asset_service.ListAssetsRequest.serialize, response_deserializer=asset_service.ListAssetsResponse.deserialize, ) - return self._stubs['list_assets'] + return self._stubs["list_assets"] @property - def batch_get_assets_history(self) -> Callable[ - [asset_service.BatchGetAssetsHistoryRequest], - Awaitable[asset_service.BatchGetAssetsHistoryResponse]]: + def batch_get_assets_history( + self, + ) -> Callable[ + [asset_service.BatchGetAssetsHistoryRequest], + Awaitable[asset_service.BatchGetAssetsHistoryResponse], + ]: r"""Return a callable for the batch get assets history method over gRPC. Batch gets the update history of assets that overlap a time @@ -424,18 +444,18 @@ def batch_get_assets_history(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'batch_get_assets_history' not in self._stubs: - self._stubs['batch_get_assets_history'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/BatchGetAssetsHistory', + if "batch_get_assets_history" not in self._stubs: + self._stubs["batch_get_assets_history"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/BatchGetAssetsHistory", request_serializer=asset_service.BatchGetAssetsHistoryRequest.serialize, response_deserializer=asset_service.BatchGetAssetsHistoryResponse.deserialize, ) - return self._stubs['batch_get_assets_history'] + return self._stubs["batch_get_assets_history"] @property - def create_feed(self) -> Callable[ - [asset_service.CreateFeedRequest], - Awaitable[asset_service.Feed]]: + def create_feed( + self, + ) -> Callable[[asset_service.CreateFeedRequest], Awaitable[asset_service.Feed]]: r"""Return a callable for the create feed method over gRPC. Creates a feed in a parent @@ -452,18 +472,18 @@ def create_feed(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'create_feed' not in self._stubs: - self._stubs['create_feed'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/CreateFeed', + if "create_feed" not in self._stubs: + self._stubs["create_feed"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/CreateFeed", request_serializer=asset_service.CreateFeedRequest.serialize, response_deserializer=asset_service.Feed.deserialize, ) - return self._stubs['create_feed'] + return self._stubs["create_feed"] @property - def get_feed(self) -> Callable[ - [asset_service.GetFeedRequest], - Awaitable[asset_service.Feed]]: + def get_feed( + self, + ) -> Callable[[asset_service.GetFeedRequest], Awaitable[asset_service.Feed]]: r"""Return a callable for the get feed method over gRPC. Gets details about an asset feed. @@ -478,18 +498,20 @@ def get_feed(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'get_feed' not in self._stubs: - self._stubs['get_feed'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/GetFeed', + if "get_feed" not in self._stubs: + self._stubs["get_feed"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/GetFeed", request_serializer=asset_service.GetFeedRequest.serialize, response_deserializer=asset_service.Feed.deserialize, ) - return self._stubs['get_feed'] + return self._stubs["get_feed"] @property - def list_feeds(self) -> Callable[ - [asset_service.ListFeedsRequest], - Awaitable[asset_service.ListFeedsResponse]]: + def list_feeds( + self, + ) -> Callable[ + [asset_service.ListFeedsRequest], Awaitable[asset_service.ListFeedsResponse] + ]: r"""Return a callable for the list feeds method over gRPC. Lists all asset feeds in a parent @@ -505,18 +527,18 @@ def list_feeds(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'list_feeds' not in self._stubs: - self._stubs['list_feeds'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/ListFeeds', + if "list_feeds" not in self._stubs: + self._stubs["list_feeds"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/ListFeeds", request_serializer=asset_service.ListFeedsRequest.serialize, response_deserializer=asset_service.ListFeedsResponse.deserialize, ) - return self._stubs['list_feeds'] + return self._stubs["list_feeds"] @property - def update_feed(self) -> Callable[ - [asset_service.UpdateFeedRequest], - Awaitable[asset_service.Feed]]: + def update_feed( + self, + ) -> Callable[[asset_service.UpdateFeedRequest], Awaitable[asset_service.Feed]]: r"""Return a callable for the update feed method over gRPC. Updates an asset feed configuration. @@ -531,18 +553,18 @@ def update_feed(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'update_feed' not in self._stubs: - self._stubs['update_feed'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/UpdateFeed', + if "update_feed" not in self._stubs: + self._stubs["update_feed"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/UpdateFeed", request_serializer=asset_service.UpdateFeedRequest.serialize, response_deserializer=asset_service.Feed.deserialize, ) - return self._stubs['update_feed'] + return self._stubs["update_feed"] @property - def delete_feed(self) -> Callable[ - [asset_service.DeleteFeedRequest], - Awaitable[empty_pb2.Empty]]: + def delete_feed( + self, + ) -> Callable[[asset_service.DeleteFeedRequest], Awaitable[empty_pb2.Empty]]: r"""Return a callable for the delete feed method over gRPC. Deletes an asset feed. @@ -557,18 +579,21 @@ def delete_feed(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'delete_feed' not in self._stubs: - self._stubs['delete_feed'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/DeleteFeed', + if "delete_feed" not in self._stubs: + self._stubs["delete_feed"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/DeleteFeed", request_serializer=asset_service.DeleteFeedRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, ) - return self._stubs['delete_feed'] + return self._stubs["delete_feed"] @property - def search_all_resources(self) -> Callable[ - [asset_service.SearchAllResourcesRequest], - Awaitable[asset_service.SearchAllResourcesResponse]]: + def search_all_resources( + self, + ) -> Callable[ + [asset_service.SearchAllResourcesRequest], + Awaitable[asset_service.SearchAllResourcesResponse], + ]: r"""Return a callable for the search all resources method over gRPC. Searches all Google Cloud resources within the specified scope, @@ -586,18 +611,21 @@ def search_all_resources(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'search_all_resources' not in self._stubs: - self._stubs['search_all_resources'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/SearchAllResources', + if "search_all_resources" not in self._stubs: + self._stubs["search_all_resources"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/SearchAllResources", request_serializer=asset_service.SearchAllResourcesRequest.serialize, response_deserializer=asset_service.SearchAllResourcesResponse.deserialize, ) - return self._stubs['search_all_resources'] + return self._stubs["search_all_resources"] @property - def search_all_iam_policies(self) -> Callable[ - [asset_service.SearchAllIamPoliciesRequest], - Awaitable[asset_service.SearchAllIamPoliciesResponse]]: + def search_all_iam_policies( + self, + ) -> Callable[ + [asset_service.SearchAllIamPoliciesRequest], + Awaitable[asset_service.SearchAllIamPoliciesResponse], + ]: r"""Return a callable for the search all iam policies method over gRPC. Searches all IAM policies within the specified scope, such as a @@ -615,18 +643,21 @@ def search_all_iam_policies(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'search_all_iam_policies' not in self._stubs: - self._stubs['search_all_iam_policies'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/SearchAllIamPolicies', + if "search_all_iam_policies" not in self._stubs: + self._stubs["search_all_iam_policies"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/SearchAllIamPolicies", request_serializer=asset_service.SearchAllIamPoliciesRequest.serialize, response_deserializer=asset_service.SearchAllIamPoliciesResponse.deserialize, ) - return self._stubs['search_all_iam_policies'] + return self._stubs["search_all_iam_policies"] @property - def analyze_iam_policy(self) -> Callable[ - [asset_service.AnalyzeIamPolicyRequest], - Awaitable[asset_service.AnalyzeIamPolicyResponse]]: + def analyze_iam_policy( + self, + ) -> Callable[ + [asset_service.AnalyzeIamPolicyRequest], + Awaitable[asset_service.AnalyzeIamPolicyResponse], + ]: r"""Return a callable for the analyze iam policy method over gRPC. Analyzes IAM policies to answer which identities have @@ -642,18 +673,21 @@ def analyze_iam_policy(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'analyze_iam_policy' not in self._stubs: - self._stubs['analyze_iam_policy'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/AnalyzeIamPolicy', + if "analyze_iam_policy" not in self._stubs: + self._stubs["analyze_iam_policy"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/AnalyzeIamPolicy", request_serializer=asset_service.AnalyzeIamPolicyRequest.serialize, response_deserializer=asset_service.AnalyzeIamPolicyResponse.deserialize, ) - return self._stubs['analyze_iam_policy'] + return self._stubs["analyze_iam_policy"] @property - def analyze_iam_policy_longrunning(self) -> Callable[ - [asset_service.AnalyzeIamPolicyLongrunningRequest], - Awaitable[operations_pb2.Operation]]: + def analyze_iam_policy_longrunning( + self, + ) -> Callable[ + [asset_service.AnalyzeIamPolicyLongrunningRequest], + Awaitable[operations_pb2.Operation], + ]: r"""Return a callable for the analyze iam policy longrunning method over gRPC. Analyzes IAM policies asynchronously to answer which identities @@ -679,18 +713,22 @@ def analyze_iam_policy_longrunning(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'analyze_iam_policy_longrunning' not in self._stubs: - self._stubs['analyze_iam_policy_longrunning'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/AnalyzeIamPolicyLongrunning', + if "analyze_iam_policy_longrunning" not in self._stubs: + self._stubs[ + "analyze_iam_policy_longrunning" + ] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/AnalyzeIamPolicyLongrunning", request_serializer=asset_service.AnalyzeIamPolicyLongrunningRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['analyze_iam_policy_longrunning'] + return self._stubs["analyze_iam_policy_longrunning"] @property - def analyze_move(self) -> Callable[ - [asset_service.AnalyzeMoveRequest], - Awaitable[asset_service.AnalyzeMoveResponse]]: + def analyze_move( + self, + ) -> Callable[ + [asset_service.AnalyzeMoveRequest], Awaitable[asset_service.AnalyzeMoveResponse] + ]: r"""Return a callable for the analyze move method over gRPC. 
Analyze moving a resource to a specified destination @@ -711,18 +749,20 @@ def analyze_move(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'analyze_move' not in self._stubs: - self._stubs['analyze_move'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/AnalyzeMove', + if "analyze_move" not in self._stubs: + self._stubs["analyze_move"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/AnalyzeMove", request_serializer=asset_service.AnalyzeMoveRequest.serialize, response_deserializer=asset_service.AnalyzeMoveResponse.deserialize, ) - return self._stubs['analyze_move'] + return self._stubs["analyze_move"] @property - def query_assets(self) -> Callable[ - [asset_service.QueryAssetsRequest], - Awaitable[asset_service.QueryAssetsResponse]]: + def query_assets( + self, + ) -> Callable[ + [asset_service.QueryAssetsRequest], Awaitable[asset_service.QueryAssetsResponse] + ]: r"""Return a callable for the query assets method over gRPC. Issue a job that queries assets using a SQL statement compatible @@ -752,18 +792,20 @@ def query_assets(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'query_assets' not in self._stubs: - self._stubs['query_assets'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/QueryAssets', + if "query_assets" not in self._stubs: + self._stubs["query_assets"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/QueryAssets", request_serializer=asset_service.QueryAssetsRequest.serialize, response_deserializer=asset_service.QueryAssetsResponse.deserialize, ) - return self._stubs['query_assets'] + return self._stubs["query_assets"] @property - def create_saved_query(self) -> Callable[ - [asset_service.CreateSavedQueryRequest], - Awaitable[asset_service.SavedQuery]]: + def create_saved_query( + self, + ) -> Callable[ + [asset_service.CreateSavedQueryRequest], Awaitable[asset_service.SavedQuery] + ]: r"""Return a callable for the create saved query method over gRPC. Creates a saved query in a parent @@ -779,18 +821,20 @@ def create_saved_query(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'create_saved_query' not in self._stubs: - self._stubs['create_saved_query'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/CreateSavedQuery', + if "create_saved_query" not in self._stubs: + self._stubs["create_saved_query"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/CreateSavedQuery", request_serializer=asset_service.CreateSavedQueryRequest.serialize, response_deserializer=asset_service.SavedQuery.deserialize, ) - return self._stubs['create_saved_query'] + return self._stubs["create_saved_query"] @property - def get_saved_query(self) -> Callable[ - [asset_service.GetSavedQueryRequest], - Awaitable[asset_service.SavedQuery]]: + def get_saved_query( + self, + ) -> Callable[ + [asset_service.GetSavedQueryRequest], Awaitable[asset_service.SavedQuery] + ]: r"""Return a callable for the get saved query method over gRPC. Gets details about a saved query. @@ -805,18 +849,21 @@ def get_saved_query(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'get_saved_query' not in self._stubs: - self._stubs['get_saved_query'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/GetSavedQuery', + if "get_saved_query" not in self._stubs: + self._stubs["get_saved_query"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/GetSavedQuery", request_serializer=asset_service.GetSavedQueryRequest.serialize, response_deserializer=asset_service.SavedQuery.deserialize, ) - return self._stubs['get_saved_query'] + return self._stubs["get_saved_query"] @property - def list_saved_queries(self) -> Callable[ - [asset_service.ListSavedQueriesRequest], - Awaitable[asset_service.ListSavedQueriesResponse]]: + def list_saved_queries( + self, + ) -> Callable[ + [asset_service.ListSavedQueriesRequest], + Awaitable[asset_service.ListSavedQueriesResponse], + ]: r"""Return a callable for the list saved queries method over gRPC. Lists all saved queries in a parent @@ -832,18 +879,20 @@ def list_saved_queries(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'list_saved_queries' not in self._stubs: - self._stubs['list_saved_queries'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/ListSavedQueries', + if "list_saved_queries" not in self._stubs: + self._stubs["list_saved_queries"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/ListSavedQueries", request_serializer=asset_service.ListSavedQueriesRequest.serialize, response_deserializer=asset_service.ListSavedQueriesResponse.deserialize, ) - return self._stubs['list_saved_queries'] + return self._stubs["list_saved_queries"] @property - def update_saved_query(self) -> Callable[ - [asset_service.UpdateSavedQueryRequest], - Awaitable[asset_service.SavedQuery]]: + def update_saved_query( + self, + ) -> Callable[ + [asset_service.UpdateSavedQueryRequest], Awaitable[asset_service.SavedQuery] + ]: r"""Return a callable for the update saved query method over gRPC. Updates a saved query. @@ -858,18 +907,18 @@ def update_saved_query(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'update_saved_query' not in self._stubs: - self._stubs['update_saved_query'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/UpdateSavedQuery', + if "update_saved_query" not in self._stubs: + self._stubs["update_saved_query"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/UpdateSavedQuery", request_serializer=asset_service.UpdateSavedQueryRequest.serialize, response_deserializer=asset_service.SavedQuery.deserialize, ) - return self._stubs['update_saved_query'] + return self._stubs["update_saved_query"] @property - def delete_saved_query(self) -> Callable[ - [asset_service.DeleteSavedQueryRequest], - Awaitable[empty_pb2.Empty]]: + def delete_saved_query( + self, + ) -> Callable[[asset_service.DeleteSavedQueryRequest], Awaitable[empty_pb2.Empty]]: r"""Return a callable for the delete saved query method over gRPC. Deletes a saved query. @@ -884,18 +933,21 @@ def delete_saved_query(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'delete_saved_query' not in self._stubs: - self._stubs['delete_saved_query'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/DeleteSavedQuery', + if "delete_saved_query" not in self._stubs: + self._stubs["delete_saved_query"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/DeleteSavedQuery", request_serializer=asset_service.DeleteSavedQueryRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, ) - return self._stubs['delete_saved_query'] + return self._stubs["delete_saved_query"] @property - def batch_get_effective_iam_policies(self) -> Callable[ - [asset_service.BatchGetEffectiveIamPoliciesRequest], - Awaitable[asset_service.BatchGetEffectiveIamPoliciesResponse]]: + def batch_get_effective_iam_policies( + self, + ) -> Callable[ + [asset_service.BatchGetEffectiveIamPoliciesRequest], + Awaitable[asset_service.BatchGetEffectiveIamPoliciesResponse], + ]: r"""Return a callable for the batch get effective iam policies method over gRPC. @@ -911,18 +963,23 @@ def batch_get_effective_iam_policies(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'batch_get_effective_iam_policies' not in self._stubs: - self._stubs['batch_get_effective_iam_policies'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/BatchGetEffectiveIamPolicies', + if "batch_get_effective_iam_policies" not in self._stubs: + self._stubs[ + "batch_get_effective_iam_policies" + ] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/BatchGetEffectiveIamPolicies", request_serializer=asset_service.BatchGetEffectiveIamPoliciesRequest.serialize, response_deserializer=asset_service.BatchGetEffectiveIamPoliciesResponse.deserialize, ) - return self._stubs['batch_get_effective_iam_policies'] + return self._stubs["batch_get_effective_iam_policies"] @property - def analyze_org_policies(self) -> Callable[ - [asset_service.AnalyzeOrgPoliciesRequest], - Awaitable[asset_service.AnalyzeOrgPoliciesResponse]]: + def analyze_org_policies( + self, + ) -> Callable[ + [asset_service.AnalyzeOrgPoliciesRequest], + Awaitable[asset_service.AnalyzeOrgPoliciesResponse], + ]: r"""Return a callable for the analyze org policies method over gRPC. Analyzes organization policies under a scope. @@ -937,18 +994,21 @@ def analyze_org_policies(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'analyze_org_policies' not in self._stubs: - self._stubs['analyze_org_policies'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/AnalyzeOrgPolicies', + if "analyze_org_policies" not in self._stubs: + self._stubs["analyze_org_policies"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/AnalyzeOrgPolicies", request_serializer=asset_service.AnalyzeOrgPoliciesRequest.serialize, response_deserializer=asset_service.AnalyzeOrgPoliciesResponse.deserialize, ) - return self._stubs['analyze_org_policies'] + return self._stubs["analyze_org_policies"] @property - def analyze_org_policy_governed_containers(self) -> Callable[ - [asset_service.AnalyzeOrgPolicyGovernedContainersRequest], - Awaitable[asset_service.AnalyzeOrgPolicyGovernedContainersResponse]]: + def analyze_org_policy_governed_containers( + self, + ) -> Callable[ + [asset_service.AnalyzeOrgPolicyGovernedContainersRequest], + Awaitable[asset_service.AnalyzeOrgPolicyGovernedContainersResponse], + ]: r"""Return a callable for the analyze org policy governed containers method over gRPC. @@ -965,18 +1025,23 @@ def analyze_org_policy_governed_containers(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'analyze_org_policy_governed_containers' not in self._stubs: - self._stubs['analyze_org_policy_governed_containers'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/AnalyzeOrgPolicyGovernedContainers', + if "analyze_org_policy_governed_containers" not in self._stubs: + self._stubs[ + "analyze_org_policy_governed_containers" + ] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/AnalyzeOrgPolicyGovernedContainers", request_serializer=asset_service.AnalyzeOrgPolicyGovernedContainersRequest.serialize, response_deserializer=asset_service.AnalyzeOrgPolicyGovernedContainersResponse.deserialize, ) - return self._stubs['analyze_org_policy_governed_containers'] + return self._stubs["analyze_org_policy_governed_containers"] @property - def analyze_org_policy_governed_assets(self) -> Callable[ - [asset_service.AnalyzeOrgPolicyGovernedAssetsRequest], - Awaitable[asset_service.AnalyzeOrgPolicyGovernedAssetsResponse]]: + def analyze_org_policy_governed_assets( + self, + ) -> Callable[ + [asset_service.AnalyzeOrgPolicyGovernedAssetsRequest], + Awaitable[asset_service.AnalyzeOrgPolicyGovernedAssetsResponse], + ]: r"""Return a callable for the analyze org policy governed assets method over gRPC. @@ -1010,16 +1075,18 @@ def analyze_org_policy_governed_assets(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'analyze_org_policy_governed_assets' not in self._stubs: - self._stubs['analyze_org_policy_governed_assets'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/AnalyzeOrgPolicyGovernedAssets', + if "analyze_org_policy_governed_assets" not in self._stubs: + self._stubs[ + "analyze_org_policy_governed_assets" + ] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/AnalyzeOrgPolicyGovernedAssets", request_serializer=asset_service.AnalyzeOrgPolicyGovernedAssetsRequest.serialize, response_deserializer=asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.deserialize, ) - return self._stubs['analyze_org_policy_governed_assets'] + return self._stubs["analyze_org_policy_governed_assets"] def _prep_wrapped_messages(self, client_info): - """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" self._wrapped_methods = { self.export_assets: self._wrap_method( self.export_assets, @@ -1228,8 +1295,7 @@ def kind(self) -> str: def get_operation( self, ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ + r"""Return a callable for the get_operation method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -1243,6 +1309,4 @@ def get_operation( return self._stubs["get_operation"] -__all__ = ( - 'AssetServiceGrpcAsyncIOTransport', -) +__all__ = ("AssetServiceGrpcAsyncIOTransport",) diff --git a/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py b/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py index b7f32b5fc7..f2c151a899 100755 --- a/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py +++ b/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py @@ -13,34 +13,27 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -import logging +import dataclasses import json # type: ignore +import logging +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings -from google.auth.transport.requests import AuthorizedSession # type: ignore -from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import gapic_v1, operations_v1, rest_helpers, rest_streaming from google.api_core import exceptions as core_exceptions from google.api_core import retry as retries -from google.api_core import rest_helpers -from google.api_core import rest_streaming -from google.api_core import gapic_v1 +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.longrunning import operations_pb2 # type: ignore import google.protobuf - +from google.protobuf import empty_pb2 # type: ignore from google.protobuf import json_format -from google.api_core import operations_v1 - from requests import __version__ as requests_version -import dataclasses -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union -import warnings - from google.cloud.asset_v1.types import asset_service -from google.protobuf import empty_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore - -from .rest_base import _BaseAssetServiceRestTransport from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .rest_base import _BaseAssetServiceRestTransport try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] @@ -49,6 +42,7 @@ try: from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False @@ -261,7 +255,14 @@ def post_update_saved_query(self, response): """ - def pre_analyze_iam_policy(self, request: asset_service.AnalyzeIamPolicyRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.AnalyzeIamPolicyRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + + def pre_analyze_iam_policy( + self, + request: asset_service.AnalyzeIamPolicyRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + asset_service.AnalyzeIamPolicyRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for analyze_iam_policy Override in a subclass to manipulate the request or metadata @@ -269,7 +270,9 @@ def pre_analyze_iam_policy(self, request: asset_service.AnalyzeIamPolicyRequest, """ return request, metadata - def post_analyze_iam_policy(self, response: asset_service.AnalyzeIamPolicyResponse) -> asset_service.AnalyzeIamPolicyResponse: + def post_analyze_iam_policy( + self, response: asset_service.AnalyzeIamPolicyResponse + ) -> asset_service.AnalyzeIamPolicyResponse: """Post-rpc interceptor for analyze_iam_policy DEPRECATED. 
Please use the `post_analyze_iam_policy_with_metadata` @@ -282,7 +285,13 @@ def post_analyze_iam_policy(self, response: asset_service.AnalyzeIamPolicyRespon """ return response - def post_analyze_iam_policy_with_metadata(self, response: asset_service.AnalyzeIamPolicyResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.AnalyzeIamPolicyResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_analyze_iam_policy_with_metadata( + self, + response: asset_service.AnalyzeIamPolicyResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + asset_service.AnalyzeIamPolicyResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Post-rpc interceptor for analyze_iam_policy Override in a subclass to read or manipulate the response or metadata after it @@ -297,7 +306,14 @@ def post_analyze_iam_policy_with_metadata(self, response: asset_service.AnalyzeI """ return response, metadata - def pre_analyze_iam_policy_longrunning(self, request: asset_service.AnalyzeIamPolicyLongrunningRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.AnalyzeIamPolicyLongrunningRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_analyze_iam_policy_longrunning( + self, + request: asset_service.AnalyzeIamPolicyLongrunningRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + asset_service.AnalyzeIamPolicyLongrunningRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: """Pre-rpc interceptor for analyze_iam_policy_longrunning Override in a subclass to manipulate the request or metadata @@ -305,7 +321,9 @@ def pre_analyze_iam_policy_longrunning(self, request: asset_service.AnalyzeIamPo """ return request, metadata - def post_analyze_iam_policy_longrunning(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + def post_analyze_iam_policy_longrunning( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: """Post-rpc interceptor for analyze_iam_policy_longrunning DEPRECATED. 
Please use the `post_analyze_iam_policy_longrunning_with_metadata` @@ -318,7 +336,11 @@ def post_analyze_iam_policy_longrunning(self, response: operations_pb2.Operation """ return response - def post_analyze_iam_policy_longrunning_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_analyze_iam_policy_longrunning_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for analyze_iam_policy_longrunning Override in a subclass to read or manipulate the response or metadata after it @@ -333,7 +355,13 @@ def post_analyze_iam_policy_longrunning_with_metadata(self, response: operations """ return response, metadata - def pre_analyze_move(self, request: asset_service.AnalyzeMoveRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.AnalyzeMoveRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_analyze_move( + self, + request: asset_service.AnalyzeMoveRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + asset_service.AnalyzeMoveRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for analyze_move Override in a subclass to manipulate the request or metadata @@ -341,7 +369,9 @@ def pre_analyze_move(self, request: asset_service.AnalyzeMoveRequest, metadata: """ return request, metadata - def post_analyze_move(self, response: asset_service.AnalyzeMoveResponse) -> asset_service.AnalyzeMoveResponse: + def post_analyze_move( + self, response: asset_service.AnalyzeMoveResponse + ) -> asset_service.AnalyzeMoveResponse: """Post-rpc interceptor for analyze_move DEPRECATED. 
Please use the `post_analyze_move_with_metadata` @@ -354,7 +384,13 @@ def post_analyze_move(self, response: asset_service.AnalyzeMoveResponse) -> asse """ return response - def post_analyze_move_with_metadata(self, response: asset_service.AnalyzeMoveResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.AnalyzeMoveResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_analyze_move_with_metadata( + self, + response: asset_service.AnalyzeMoveResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + asset_service.AnalyzeMoveResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Post-rpc interceptor for analyze_move Override in a subclass to read or manipulate the response or metadata after it @@ -369,7 +405,13 @@ def post_analyze_move_with_metadata(self, response: asset_service.AnalyzeMoveRes """ return response, metadata - def pre_analyze_org_policies(self, request: asset_service.AnalyzeOrgPoliciesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.AnalyzeOrgPoliciesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_analyze_org_policies( + self, + request: asset_service.AnalyzeOrgPoliciesRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + asset_service.AnalyzeOrgPoliciesRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for analyze_org_policies Override in a subclass to manipulate the request or metadata @@ -377,7 +419,9 @@ def pre_analyze_org_policies(self, request: asset_service.AnalyzeOrgPoliciesRequ """ return request, metadata - def post_analyze_org_policies(self, response: asset_service.AnalyzeOrgPoliciesResponse) -> asset_service.AnalyzeOrgPoliciesResponse: + def post_analyze_org_policies( + self, response: asset_service.AnalyzeOrgPoliciesResponse + ) -> asset_service.AnalyzeOrgPoliciesResponse: """Post-rpc interceptor for analyze_org_policies DEPRECATED. 
Please use the `post_analyze_org_policies_with_metadata` @@ -390,7 +434,14 @@ def post_analyze_org_policies(self, response: asset_service.AnalyzeOrgPoliciesRe """ return response - def post_analyze_org_policies_with_metadata(self, response: asset_service.AnalyzeOrgPoliciesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.AnalyzeOrgPoliciesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_analyze_org_policies_with_metadata( + self, + response: asset_service.AnalyzeOrgPoliciesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + asset_service.AnalyzeOrgPoliciesResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: """Post-rpc interceptor for analyze_org_policies Override in a subclass to read or manipulate the response or metadata after it @@ -405,7 +456,14 @@ def post_analyze_org_policies_with_metadata(self, response: asset_service.Analyz """ return response, metadata - def pre_analyze_org_policy_governed_assets(self, request: asset_service.AnalyzeOrgPolicyGovernedAssetsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.AnalyzeOrgPolicyGovernedAssetsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_analyze_org_policy_governed_assets( + self, + request: asset_service.AnalyzeOrgPolicyGovernedAssetsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + asset_service.AnalyzeOrgPolicyGovernedAssetsRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: """Pre-rpc interceptor for analyze_org_policy_governed_assets Override in a subclass to manipulate the request or metadata @@ -413,7 +471,9 @@ def pre_analyze_org_policy_governed_assets(self, request: asset_service.AnalyzeO """ return request, metadata - def post_analyze_org_policy_governed_assets(self, response: asset_service.AnalyzeOrgPolicyGovernedAssetsResponse) -> asset_service.AnalyzeOrgPolicyGovernedAssetsResponse: + def post_analyze_org_policy_governed_assets( + self, response: asset_service.AnalyzeOrgPolicyGovernedAssetsResponse + ) -> asset_service.AnalyzeOrgPolicyGovernedAssetsResponse: """Post-rpc interceptor for analyze_org_policy_governed_assets DEPRECATED. 
Please use the `post_analyze_org_policy_governed_assets_with_metadata` @@ -426,7 +486,14 @@ def post_analyze_org_policy_governed_assets(self, response: asset_service.Analyz """ return response - def post_analyze_org_policy_governed_assets_with_metadata(self, response: asset_service.AnalyzeOrgPolicyGovernedAssetsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.AnalyzeOrgPolicyGovernedAssetsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_analyze_org_policy_governed_assets_with_metadata( + self, + response: asset_service.AnalyzeOrgPolicyGovernedAssetsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: """Post-rpc interceptor for analyze_org_policy_governed_assets Override in a subclass to read or manipulate the response or metadata after it @@ -441,7 +508,14 @@ def post_analyze_org_policy_governed_assets_with_metadata(self, response: asset_ """ return response, metadata - def pre_analyze_org_policy_governed_containers(self, request: asset_service.AnalyzeOrgPolicyGovernedContainersRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.AnalyzeOrgPolicyGovernedContainersRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_analyze_org_policy_governed_containers( + self, + request: asset_service.AnalyzeOrgPolicyGovernedContainersRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + asset_service.AnalyzeOrgPolicyGovernedContainersRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: """Pre-rpc interceptor for analyze_org_policy_governed_containers Override in a subclass to manipulate the request or metadata @@ -449,7 +523,9 @@ def pre_analyze_org_policy_governed_containers(self, request: asset_service.Anal """ return request, metadata - def post_analyze_org_policy_governed_containers(self, response: asset_service.AnalyzeOrgPolicyGovernedContainersResponse) -> asset_service.AnalyzeOrgPolicyGovernedContainersResponse: + def post_analyze_org_policy_governed_containers( + self, response: asset_service.AnalyzeOrgPolicyGovernedContainersResponse + ) -> asset_service.AnalyzeOrgPolicyGovernedContainersResponse: """Post-rpc interceptor for analyze_org_policy_governed_containers DEPRECATED. 
Please use the `post_analyze_org_policy_governed_containers_with_metadata` @@ -462,7 +538,14 @@ def post_analyze_org_policy_governed_containers(self, response: asset_service.An """ return response - def post_analyze_org_policy_governed_containers_with_metadata(self, response: asset_service.AnalyzeOrgPolicyGovernedContainersResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.AnalyzeOrgPolicyGovernedContainersResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_analyze_org_policy_governed_containers_with_metadata( + self, + response: asset_service.AnalyzeOrgPolicyGovernedContainersResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + asset_service.AnalyzeOrgPolicyGovernedContainersResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: """Post-rpc interceptor for analyze_org_policy_governed_containers Override in a subclass to read or manipulate the response or metadata after it @@ -477,7 +560,14 @@ def post_analyze_org_policy_governed_containers_with_metadata(self, response: as """ return response, metadata - def pre_batch_get_assets_history(self, request: asset_service.BatchGetAssetsHistoryRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.BatchGetAssetsHistoryRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_batch_get_assets_history( + self, + request: asset_service.BatchGetAssetsHistoryRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + asset_service.BatchGetAssetsHistoryRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: """Pre-rpc interceptor for batch_get_assets_history Override in a subclass to manipulate the request or metadata @@ -485,7 +575,9 @@ def pre_batch_get_assets_history(self, request: asset_service.BatchGetAssetsHist """ return request, metadata - def post_batch_get_assets_history(self, response: asset_service.BatchGetAssetsHistoryResponse) -> asset_service.BatchGetAssetsHistoryResponse: + def post_batch_get_assets_history( + self, response: asset_service.BatchGetAssetsHistoryResponse + ) -> asset_service.BatchGetAssetsHistoryResponse: """Post-rpc interceptor for batch_get_assets_history DEPRECATED. 
Please use the `post_batch_get_assets_history_with_metadata` @@ -498,7 +590,14 @@ def post_batch_get_assets_history(self, response: asset_service.BatchGetAssetsHi """ return response - def post_batch_get_assets_history_with_metadata(self, response: asset_service.BatchGetAssetsHistoryResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.BatchGetAssetsHistoryResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_batch_get_assets_history_with_metadata( + self, + response: asset_service.BatchGetAssetsHistoryResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + asset_service.BatchGetAssetsHistoryResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: """Post-rpc interceptor for batch_get_assets_history Override in a subclass to read or manipulate the response or metadata after it @@ -513,7 +612,14 @@ def post_batch_get_assets_history_with_metadata(self, response: asset_service.Ba """ return response, metadata - def pre_batch_get_effective_iam_policies(self, request: asset_service.BatchGetEffectiveIamPoliciesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.BatchGetEffectiveIamPoliciesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_batch_get_effective_iam_policies( + self, + request: asset_service.BatchGetEffectiveIamPoliciesRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + asset_service.BatchGetEffectiveIamPoliciesRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: """Pre-rpc interceptor for batch_get_effective_iam_policies Override in a subclass to manipulate the request or metadata @@ -521,7 +627,9 @@ def pre_batch_get_effective_iam_policies(self, request: asset_service.BatchGetEf """ return request, metadata - def post_batch_get_effective_iam_policies(self, response: asset_service.BatchGetEffectiveIamPoliciesResponse) -> asset_service.BatchGetEffectiveIamPoliciesResponse: + def post_batch_get_effective_iam_policies( + self, response: asset_service.BatchGetEffectiveIamPoliciesResponse + ) -> asset_service.BatchGetEffectiveIamPoliciesResponse: """Post-rpc interceptor for batch_get_effective_iam_policies DEPRECATED. 
Please use the `post_batch_get_effective_iam_policies_with_metadata` @@ -534,7 +642,14 @@ def post_batch_get_effective_iam_policies(self, response: asset_service.BatchGet """ return response - def post_batch_get_effective_iam_policies_with_metadata(self, response: asset_service.BatchGetEffectiveIamPoliciesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.BatchGetEffectiveIamPoliciesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_batch_get_effective_iam_policies_with_metadata( + self, + response: asset_service.BatchGetEffectiveIamPoliciesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + asset_service.BatchGetEffectiveIamPoliciesResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: """Post-rpc interceptor for batch_get_effective_iam_policies Override in a subclass to read or manipulate the response or metadata after it @@ -549,7 +664,13 @@ def post_batch_get_effective_iam_policies_with_metadata(self, response: asset_se """ return response, metadata - def pre_create_feed(self, request: asset_service.CreateFeedRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.CreateFeedRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_create_feed( + self, + request: asset_service.CreateFeedRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + asset_service.CreateFeedRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for create_feed Override in a subclass to manipulate the request or metadata @@ -570,7 +691,11 @@ def post_create_feed(self, response: asset_service.Feed) -> asset_service.Feed: """ return response - def post_create_feed_with_metadata(self, response: asset_service.Feed, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.Feed, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_create_feed_with_metadata( + self, + response: asset_service.Feed, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[asset_service.Feed, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for create_feed Override in a subclass to read or manipulate the response or metadata after it @@ -585,7 +710,13 @@ def post_create_feed_with_metadata(self, response: asset_service.Feed, metadata: """ return response, metadata - def pre_create_saved_query(self, request: asset_service.CreateSavedQueryRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.CreateSavedQueryRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_create_saved_query( + self, + request: asset_service.CreateSavedQueryRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + asset_service.CreateSavedQueryRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for create_saved_query Override in a subclass to manipulate the request or metadata @@ -593,7 +724,9 @@ def pre_create_saved_query(self, request: asset_service.CreateSavedQueryRequest, """ return request, metadata - def post_create_saved_query(self, response: asset_service.SavedQuery) -> asset_service.SavedQuery: + def post_create_saved_query( + self, response: asset_service.SavedQuery + ) -> asset_service.SavedQuery: """Post-rpc interceptor for create_saved_query DEPRECATED. 
Please use the `post_create_saved_query_with_metadata` @@ -606,7 +739,11 @@ def post_create_saved_query(self, response: asset_service.SavedQuery) -> asset_s """ return response - def post_create_saved_query_with_metadata(self, response: asset_service.SavedQuery, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.SavedQuery, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_create_saved_query_with_metadata( + self, + response: asset_service.SavedQuery, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[asset_service.SavedQuery, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for create_saved_query Override in a subclass to read or manipulate the response or metadata after it @@ -621,7 +758,13 @@ def post_create_saved_query_with_metadata(self, response: asset_service.SavedQue """ return response, metadata - def pre_delete_feed(self, request: asset_service.DeleteFeedRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.DeleteFeedRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_delete_feed( + self, + request: asset_service.DeleteFeedRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + asset_service.DeleteFeedRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for delete_feed Override in a subclass to manipulate the request or metadata @@ -629,7 +772,13 @@ def pre_delete_feed(self, request: asset_service.DeleteFeedRequest, metadata: Se """ return request, metadata - def pre_delete_saved_query(self, request: asset_service.DeleteSavedQueryRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.DeleteSavedQueryRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_delete_saved_query( + self, + request: asset_service.DeleteSavedQueryRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + asset_service.DeleteSavedQueryRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for delete_saved_query Override in a subclass to manipulate the request or metadata @@ -637,7 +786,13 @@ def pre_delete_saved_query(self, request: asset_service.DeleteSavedQueryRequest, """ return request, metadata - def pre_export_assets(self, request: asset_service.ExportAssetsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.ExportAssetsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_export_assets( + self, + request: asset_service.ExportAssetsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + asset_service.ExportAssetsRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for export_assets Override in a subclass to manipulate the request or metadata @@ -645,7 +800,9 @@ def pre_export_assets(self, request: asset_service.ExportAssetsRequest, metadata """ return request, metadata - def post_export_assets(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + def post_export_assets( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: """Post-rpc interceptor for export_assets DEPRECATED. 
Please use the `post_export_assets_with_metadata` @@ -658,7 +815,11 @@ def post_export_assets(self, response: operations_pb2.Operation) -> operations_p """ return response - def post_export_assets_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_export_assets_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for export_assets Override in a subclass to read or manipulate the response or metadata after it @@ -673,7 +834,11 @@ def post_export_assets_with_metadata(self, response: operations_pb2.Operation, m """ return response, metadata - def pre_get_feed(self, request: asset_service.GetFeedRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.GetFeedRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_get_feed( + self, + request: asset_service.GetFeedRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[asset_service.GetFeedRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for get_feed Override in a subclass to manipulate the request or metadata @@ -694,7 +859,11 @@ def post_get_feed(self, response: asset_service.Feed) -> asset_service.Feed: """ return response - def post_get_feed_with_metadata(self, response: asset_service.Feed, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.Feed, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_get_feed_with_metadata( + self, + response: asset_service.Feed, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[asset_service.Feed, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for get_feed Override in a subclass to read or manipulate the response or metadata after it @@ -709,7 +878,13 @@ def post_get_feed_with_metadata(self, response: asset_service.Feed, metadata: Se """ return response, metadata - def pre_get_saved_query(self, request: asset_service.GetSavedQueryRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.GetSavedQueryRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_get_saved_query( + self, + request: asset_service.GetSavedQueryRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + asset_service.GetSavedQueryRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for get_saved_query Override in a subclass to manipulate the request or metadata @@ -717,7 +892,9 @@ def pre_get_saved_query(self, request: asset_service.GetSavedQueryRequest, metad """ return request, metadata - def post_get_saved_query(self, response: asset_service.SavedQuery) -> asset_service.SavedQuery: + def post_get_saved_query( + self, response: asset_service.SavedQuery + ) -> asset_service.SavedQuery: """Post-rpc interceptor for get_saved_query DEPRECATED. 
Please use the `post_get_saved_query_with_metadata` @@ -730,7 +907,11 @@ def post_get_saved_query(self, response: asset_service.SavedQuery) -> asset_serv """ return response - def post_get_saved_query_with_metadata(self, response: asset_service.SavedQuery, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.SavedQuery, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_get_saved_query_with_metadata( + self, + response: asset_service.SavedQuery, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[asset_service.SavedQuery, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for get_saved_query Override in a subclass to read or manipulate the response or metadata after it @@ -745,7 +926,13 @@ def post_get_saved_query_with_metadata(self, response: asset_service.SavedQuery, """ return response, metadata - def pre_list_assets(self, request: asset_service.ListAssetsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.ListAssetsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_list_assets( + self, + request: asset_service.ListAssetsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + asset_service.ListAssetsRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for list_assets Override in a subclass to manipulate the request or metadata @@ -753,7 +940,9 @@ def pre_list_assets(self, request: asset_service.ListAssetsRequest, metadata: Se """ return request, metadata - def post_list_assets(self, response: asset_service.ListAssetsResponse) -> asset_service.ListAssetsResponse: + def post_list_assets( + self, response: asset_service.ListAssetsResponse + ) -> asset_service.ListAssetsResponse: """Post-rpc interceptor for list_assets DEPRECATED. 
Please use the `post_list_assets_with_metadata` @@ -766,7 +955,13 @@ def post_list_assets(self, response: asset_service.ListAssetsResponse) -> asset_ """ return response - def post_list_assets_with_metadata(self, response: asset_service.ListAssetsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.ListAssetsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_list_assets_with_metadata( + self, + response: asset_service.ListAssetsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + asset_service.ListAssetsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Post-rpc interceptor for list_assets Override in a subclass to read or manipulate the response or metadata after it @@ -781,7 +976,11 @@ def post_list_assets_with_metadata(self, response: asset_service.ListAssetsRespo """ return response, metadata - def pre_list_feeds(self, request: asset_service.ListFeedsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.ListFeedsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_list_feeds( + self, + request: asset_service.ListFeedsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[asset_service.ListFeedsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for list_feeds Override in a subclass to manipulate the request or metadata @@ -789,7 +988,9 @@ def pre_list_feeds(self, request: asset_service.ListFeedsRequest, metadata: Sequ """ return request, metadata - def post_list_feeds(self, response: asset_service.ListFeedsResponse) -> asset_service.ListFeedsResponse: + def post_list_feeds( + self, response: asset_service.ListFeedsResponse + ) -> asset_service.ListFeedsResponse: """Post-rpc interceptor for list_feeds DEPRECATED. 
Please use the `post_list_feeds_with_metadata` @@ -802,7 +1003,13 @@ def post_list_feeds(self, response: asset_service.ListFeedsResponse) -> asset_se """ return response - def post_list_feeds_with_metadata(self, response: asset_service.ListFeedsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.ListFeedsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_list_feeds_with_metadata( + self, + response: asset_service.ListFeedsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + asset_service.ListFeedsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Post-rpc interceptor for list_feeds Override in a subclass to read or manipulate the response or metadata after it @@ -817,7 +1024,13 @@ def post_list_feeds_with_metadata(self, response: asset_service.ListFeedsRespons """ return response, metadata - def pre_list_saved_queries(self, request: asset_service.ListSavedQueriesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.ListSavedQueriesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_list_saved_queries( + self, + request: asset_service.ListSavedQueriesRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + asset_service.ListSavedQueriesRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for list_saved_queries Override in a subclass to manipulate the request or metadata @@ -825,7 +1038,9 @@ def pre_list_saved_queries(self, request: asset_service.ListSavedQueriesRequest, """ return request, metadata - def post_list_saved_queries(self, response: asset_service.ListSavedQueriesResponse) -> asset_service.ListSavedQueriesResponse: + def post_list_saved_queries( + self, response: asset_service.ListSavedQueriesResponse + ) -> asset_service.ListSavedQueriesResponse: """Post-rpc interceptor for list_saved_queries DEPRECATED. 
Please use the `post_list_saved_queries_with_metadata` @@ -838,7 +1053,13 @@ def post_list_saved_queries(self, response: asset_service.ListSavedQueriesRespon """ return response - def post_list_saved_queries_with_metadata(self, response: asset_service.ListSavedQueriesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.ListSavedQueriesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_list_saved_queries_with_metadata( + self, + response: asset_service.ListSavedQueriesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + asset_service.ListSavedQueriesResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Post-rpc interceptor for list_saved_queries Override in a subclass to read or manipulate the response or metadata after it @@ -853,7 +1074,13 @@ def post_list_saved_queries_with_metadata(self, response: asset_service.ListSave """ return response, metadata - def pre_query_assets(self, request: asset_service.QueryAssetsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.QueryAssetsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_query_assets( + self, + request: asset_service.QueryAssetsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + asset_service.QueryAssetsRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for query_assets Override in a subclass to manipulate the request or metadata @@ -861,7 +1088,9 @@ def pre_query_assets(self, request: asset_service.QueryAssetsRequest, metadata: """ return request, metadata - def post_query_assets(self, response: asset_service.QueryAssetsResponse) -> asset_service.QueryAssetsResponse: + def post_query_assets( + self, response: asset_service.QueryAssetsResponse + ) -> asset_service.QueryAssetsResponse: """Post-rpc interceptor for query_assets DEPRECATED. 
Please use the `post_query_assets_with_metadata` @@ -874,7 +1103,13 @@ def post_query_assets(self, response: asset_service.QueryAssetsResponse) -> asse """ return response - def post_query_assets_with_metadata(self, response: asset_service.QueryAssetsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.QueryAssetsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_query_assets_with_metadata( + self, + response: asset_service.QueryAssetsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + asset_service.QueryAssetsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Post-rpc interceptor for query_assets Override in a subclass to read or manipulate the response or metadata after it @@ -889,7 +1124,14 @@ def post_query_assets_with_metadata(self, response: asset_service.QueryAssetsRes """ return response, metadata - def pre_search_all_iam_policies(self, request: asset_service.SearchAllIamPoliciesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.SearchAllIamPoliciesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_search_all_iam_policies( + self, + request: asset_service.SearchAllIamPoliciesRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + asset_service.SearchAllIamPoliciesRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: """Pre-rpc interceptor for search_all_iam_policies Override in a subclass to manipulate the request or metadata @@ -897,7 +1139,9 @@ def pre_search_all_iam_policies(self, request: asset_service.SearchAllIamPolicie """ return request, metadata - def post_search_all_iam_policies(self, response: asset_service.SearchAllIamPoliciesResponse) -> asset_service.SearchAllIamPoliciesResponse: + def post_search_all_iam_policies( + self, response: asset_service.SearchAllIamPoliciesResponse + ) -> asset_service.SearchAllIamPoliciesResponse: """Post-rpc interceptor for search_all_iam_policies DEPRECATED. 
Please use the `post_search_all_iam_policies_with_metadata` @@ -910,7 +1154,14 @@ def post_search_all_iam_policies(self, response: asset_service.SearchAllIamPolic """ return response - def post_search_all_iam_policies_with_metadata(self, response: asset_service.SearchAllIamPoliciesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.SearchAllIamPoliciesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_search_all_iam_policies_with_metadata( + self, + response: asset_service.SearchAllIamPoliciesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + asset_service.SearchAllIamPoliciesResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: """Post-rpc interceptor for search_all_iam_policies Override in a subclass to read or manipulate the response or metadata after it @@ -925,7 +1176,13 @@ def post_search_all_iam_policies_with_metadata(self, response: asset_service.Sea """ return response, metadata - def pre_search_all_resources(self, request: asset_service.SearchAllResourcesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.SearchAllResourcesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_search_all_resources( + self, + request: asset_service.SearchAllResourcesRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + asset_service.SearchAllResourcesRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for search_all_resources Override in a subclass to manipulate the request or metadata @@ -933,7 +1190,9 @@ def pre_search_all_resources(self, request: asset_service.SearchAllResourcesRequ """ return request, metadata - def post_search_all_resources(self, response: asset_service.SearchAllResourcesResponse) -> asset_service.SearchAllResourcesResponse: + def post_search_all_resources( + self, response: asset_service.SearchAllResourcesResponse + ) -> asset_service.SearchAllResourcesResponse: """Post-rpc interceptor for search_all_resources DEPRECATED. 
Please use the `post_search_all_resources_with_metadata` @@ -946,7 +1205,14 @@ def post_search_all_resources(self, response: asset_service.SearchAllResourcesRe """ return response - def post_search_all_resources_with_metadata(self, response: asset_service.SearchAllResourcesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.SearchAllResourcesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_search_all_resources_with_metadata( + self, + response: asset_service.SearchAllResourcesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + asset_service.SearchAllResourcesResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: """Post-rpc interceptor for search_all_resources Override in a subclass to read or manipulate the response or metadata after it @@ -961,7 +1227,13 @@ def post_search_all_resources_with_metadata(self, response: asset_service.Search """ return response, metadata - def pre_update_feed(self, request: asset_service.UpdateFeedRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.UpdateFeedRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_update_feed( + self, + request: asset_service.UpdateFeedRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + asset_service.UpdateFeedRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for update_feed Override in a subclass to manipulate the request or metadata @@ -982,7 +1254,11 @@ def post_update_feed(self, response: asset_service.Feed) -> asset_service.Feed: """ return response - def post_update_feed_with_metadata(self, response: asset_service.Feed, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.Feed, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_update_feed_with_metadata( + self, + response: asset_service.Feed, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[asset_service.Feed, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for update_feed Override in a subclass to read or manipulate the response or metadata after it @@ -997,7 +1273,13 @@ def post_update_feed_with_metadata(self, response: asset_service.Feed, metadata: """ return response, metadata - def pre_update_saved_query(self, request: asset_service.UpdateSavedQueryRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.UpdateSavedQueryRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_update_saved_query( + self, + request: asset_service.UpdateSavedQueryRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + asset_service.UpdateSavedQueryRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for update_saved_query Override in a subclass to manipulate the request or metadata @@ -1005,7 +1287,9 @@ def pre_update_saved_query(self, request: asset_service.UpdateSavedQueryRequest, """ return request, metadata - def post_update_saved_query(self, response: asset_service.SavedQuery) -> asset_service.SavedQuery: + def post_update_saved_query( + self, response: asset_service.SavedQuery + ) -> asset_service.SavedQuery: """Post-rpc interceptor for update_saved_query DEPRECATED. 
Please use the `post_update_saved_query_with_metadata` @@ -1018,7 +1302,11 @@ def post_update_saved_query(self, response: asset_service.SavedQuery) -> asset_s """ return response - def post_update_saved_query_with_metadata(self, response: asset_service.SavedQuery, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.SavedQuery, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_update_saved_query_with_metadata( + self, + response: asset_service.SavedQuery, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[asset_service.SavedQuery, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for update_saved_query Override in a subclass to read or manipulate the response or metadata after it @@ -1034,8 +1322,12 @@ def post_update_saved_query_with_metadata(self, response: asset_service.SavedQue return response, metadata def pre_get_operation( - self, request: operations_pb2.GetOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.GetOperationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for get_operation Override in a subclass to manipulate the request or metadata @@ -1074,56 +1366,57 @@ class AssetServiceRestTransport(_BaseAssetServiceRestTransport): It sends JSON representations of protocol buffers over HTTP/1.1 """ - def __init__(self, *, - host: str = 'cloudasset.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - client_cert_source_for_mtls: Optional[Callable[[ - ], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - interceptor: Optional[AssetServiceRestInterceptor] = None, - api_audience: Optional[str] = None, - ) -> None: + def __init__( + self, + *, + host: str = "cloudasset.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[AssetServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: """Instantiate the transport. - NOTE: This REST transport functionality is currently in a beta - state (preview). We welcome your feedback via a GitHub issue in - this library's repository. Thank you! - - Args: - host (Optional[str]): - The hostname to connect to (default: 'cloudasset.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - - credentials_file (Optional[str]): Deprecated. A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. 
This argument will be - removed in the next major version of this library. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client - certificate to configure mutual TLS HTTP channel. It is ignored - if ``channel`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to (default: 'cloudasset.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. This argument will be + removed in the next major version of this library. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. """ # Run the base constructor # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. @@ -1135,10 +1428,11 @@ def __init__(self, *, client_info=client_info, always_use_jwt_access=always_use_jwt_access, url_scheme=url_scheme, - api_audience=api_audience + api_audience=api_audience, ) self._session = AuthorizedSession( - self._credentials, default_host=self.DEFAULT_HOST) + self._credentials, default_host=self.DEFAULT_HOST + ) self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None if client_cert_source_for_mtls: self._session.configure_mtls_channel(client_cert_source_for_mtls) @@ -1155,28 +1449,33 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: # Only create a new client if we do not already have one. 
if self._operations_client is None: http_options: Dict[str, List[Dict[str, str]]] = { - 'google.longrunning.Operations.GetOperation': [ + "google.longrunning.Operations.GetOperation": [ { - 'method': 'get', - 'uri': '/v1/{name=*/*/operations/*/**}', + "method": "get", + "uri": "/v1/{name=*/*/operations/*/**}", }, ], } rest_transport = operations_v1.OperationsRestTransport( - host=self._host, - # use the credentials which are saved - credentials=self._credentials, - scopes=self._scopes, - http_options=http_options, - path_prefix="v1") - - self._operations_client = operations_v1.AbstractOperationsClient(transport=rest_transport) + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + scopes=self._scopes, + http_options=http_options, + path_prefix="v1", + ) + + self._operations_client = operations_v1.AbstractOperationsClient( + transport=rest_transport + ) # Return the client from cache. return self._operations_client - class _AnalyzeIamPolicy(_BaseAssetServiceRestTransport._BaseAnalyzeIamPolicy, AssetServiceRestStub): + class _AnalyzeIamPolicy( + _BaseAssetServiceRestTransport._BaseAnalyzeIamPolicy, AssetServiceRestStub + ): def __hash__(self): return hash("AssetServiceRestTransport.AnalyzeIamPolicy") @@ -1188,26 +1487,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: asset_service.AnalyzeIamPolicyRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> asset_service.AnalyzeIamPolicyResponse: + def __call__( + self, + request: asset_service.AnalyzeIamPolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.AnalyzeIamPolicyResponse: r"""Call the analyze iam policy method over HTTP. 
Args: @@ -1229,30 +1530,42 @@ def __call__(self, """ - http_options = _BaseAssetServiceRestTransport._BaseAnalyzeIamPolicy._get_http_options() + http_options = ( + _BaseAssetServiceRestTransport._BaseAnalyzeIamPolicy._get_http_options() + ) - request, metadata = self._interceptor.pre_analyze_iam_policy(request, metadata) - transcoded_request = _BaseAssetServiceRestTransport._BaseAnalyzeIamPolicy._get_transcoded_request(http_options, request) + request, metadata = self._interceptor.pre_analyze_iam_policy( + request, metadata + ) + transcoded_request = _BaseAssetServiceRestTransport._BaseAnalyzeIamPolicy._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = _BaseAssetServiceRestTransport._BaseAnalyzeIamPolicy._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + query_params = _BaseAssetServiceRestTransport._BaseAnalyzeIamPolicy._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.asset_v1.AssetServiceClient.AnalyzeIamPolicy", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "AnalyzeIamPolicy", "httpRequest": http_request, @@ -1261,7 +1574,14 @@ def __call__(self, ) # Send the request - response = AssetServiceRestTransport._AnalyzeIamPolicy._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = AssetServiceRestTransport._AnalyzeIamPolicy._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
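The pre_*/post_* hooks reformatted in the interceptor class above are extension points: a subclass can adjust a request before it is transcoded to HTTP, or inspect the response and the returned metadata before they reach user code. A minimal sketch of wiring such a subclass into the REST transport, using signatures copied from this file; the class name and logging are illustrative, and the transports.rest import path is assumed from the package layout visible elsewhere in this diff:

    import logging
    from typing import Sequence, Tuple, Union

    from google.cloud import asset_v1
    from google.cloud.asset_v1.services.asset_service.transports.rest import (
        AssetServiceRestInterceptor,
        AssetServiceRestTransport,
    )
    from google.cloud.asset_v1.types import asset_service


    class LoggingAssetServiceInterceptor(AssetServiceRestInterceptor):
        """Logs ListAssets traffic; every other RPC falls through to the defaults."""

        def pre_list_assets(
            self,
            request: asset_service.ListAssetsRequest,
            metadata: Sequence[Tuple[str, Union[str, bytes]]],
        ) -> Tuple[
            asset_service.ListAssetsRequest, Sequence[Tuple[str, Union[str, bytes]]]
        ]:
            # Runs before the request is transcoded and sent.
            logging.info("ListAssets parent=%s", request.parent)
            return request, metadata

        def post_list_assets_with_metadata(
            self,
            response: asset_service.ListAssetsResponse,
            metadata: Sequence[Tuple[str, Union[str, bytes]]],
        ) -> Tuple[
            asset_service.ListAssetsResponse, Sequence[Tuple[str, Union[str, bytes]]]
        ]:
            # Runs after the server responds, before the response reaches user code.
            logging.info("ListAssets returned %d assets", len(response.assets))
            return response, metadata


    # The interceptor is attached at transport construction time.
    transport = AssetServiceRestTransport(interceptor=LoggingAssetServiceInterceptor())
    client = asset_v1.AssetServiceClient(transport=transport)

As the __call__ bodies in this file show, both response hooks run in sequence: the deprecated post_* hook first, then the *_with_metadata variant with the response headers passed as metadata.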
@@ -1276,20 +1596,26 @@ def __call__(self, resp = self._interceptor.post_analyze_iam_policy(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_analyze_iam_policy_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + resp, _ = self._interceptor.post_analyze_iam_policy_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: - response_payload = asset_service.AnalyzeIamPolicyResponse.to_json(response) + response_payload = asset_service.AnalyzeIamPolicyResponse.to_json( + response + ) except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.asset_v1.AssetServiceClient.analyze_iam_policy", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "AnalyzeIamPolicy", "metadata": http_response["headers"], @@ -1298,7 +1624,10 @@ def __call__(self, ) return resp - class _AnalyzeIamPolicyLongrunning(_BaseAssetServiceRestTransport._BaseAnalyzeIamPolicyLongrunning, AssetServiceRestStub): + class _AnalyzeIamPolicyLongrunning( + _BaseAssetServiceRestTransport._BaseAnalyzeIamPolicyLongrunning, + AssetServiceRestStub, + ): def __hash__(self): return hash("AssetServiceRestTransport.AnalyzeIamPolicyLongrunning") @@ -1310,76 +1639,92 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) return response - def __call__(self, - request: asset_service.AnalyzeIamPolicyLongrunningRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: + def __call__( + self, + request: asset_service.AnalyzeIamPolicyLongrunningRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: r"""Call the analyze iam policy - longrunning method over HTTP. - - Args: - request (~.asset_service.AnalyzeIamPolicyLongrunningRequest): - The request object. A request message for - [AssetService.AnalyzeIamPolicyLongrunning][google.cloud.asset.v1.AssetService.AnalyzeIamPolicyLongrunning]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. + longrunning method over HTTP. + + Args: + request (~.asset_service.AnalyzeIamPolicyLongrunningRequest): + The request object. A request message for + [AssetService.AnalyzeIamPolicyLongrunning][google.cloud.asset.v1.AssetService.AnalyzeIamPolicyLongrunning]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. """ - http_options = _BaseAssetServiceRestTransport._BaseAnalyzeIamPolicyLongrunning._get_http_options() + http_options = ( + _BaseAssetServiceRestTransport._BaseAnalyzeIamPolicyLongrunning._get_http_options() + ) - request, metadata = self._interceptor.pre_analyze_iam_policy_longrunning(request, metadata) - transcoded_request = _BaseAssetServiceRestTransport._BaseAnalyzeIamPolicyLongrunning._get_transcoded_request(http_options, request) + request, metadata = self._interceptor.pre_analyze_iam_policy_longrunning( + request, metadata + ) + transcoded_request = _BaseAssetServiceRestTransport._BaseAnalyzeIamPolicyLongrunning._get_transcoded_request( + http_options, request + ) - body = _BaseAssetServiceRestTransport._BaseAnalyzeIamPolicyLongrunning._get_request_body_json(transcoded_request) + body = _BaseAssetServiceRestTransport._BaseAnalyzeIamPolicyLongrunning._get_request_body_json( + transcoded_request + ) # Jsonify the query params - query_params = _BaseAssetServiceRestTransport._BaseAnalyzeIamPolicyLongrunning._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + query_params = _BaseAssetServiceRestTransport._BaseAnalyzeIamPolicyLongrunning._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.asset_v1.AssetServiceClient.AnalyzeIamPolicyLongrunning", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "AnalyzeIamPolicyLongrunning", "httpRequest": http_request, @@ -1388,7 +1733,17 @@ def __call__(self, ) # Send the request - response = AssetServiceRestTransport._AnalyzeIamPolicyLongrunning._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + response = ( + AssetServiceRestTransport._AnalyzeIamPolicyLongrunning._get_response( + self._host, + metadata, + 
query_params, + self._session, + timeout, + transcoded_request, + body, + ) + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -1401,20 +1756,27 @@ def __call__(self, resp = self._interceptor.post_analyze_iam_policy_longrunning(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_analyze_iam_policy_longrunning_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + ( + resp, + _, + ) = self._interceptor.post_analyze_iam_policy_longrunning_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: response_payload = json_format.MessageToJson(resp) except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.asset_v1.AssetServiceClient.analyze_iam_policy_longrunning", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "AnalyzeIamPolicyLongrunning", "metadata": http_response["headers"], @@ -1423,7 +1785,9 @@ def __call__(self, ) return resp - class _AnalyzeMove(_BaseAssetServiceRestTransport._BaseAnalyzeMove, AssetServiceRestStub): + class _AnalyzeMove( + _BaseAssetServiceRestTransport._BaseAnalyzeMove, AssetServiceRestStub + ): def __hash__(self): return hash("AssetServiceRestTransport.AnalyzeMove") @@ -1435,26 +1799,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: asset_service.AnalyzeMoveRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> asset_service.AnalyzeMoveResponse: + def __call__( + self, + request: asset_service.AnalyzeMoveRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.AnalyzeMoveResponse: r"""Call the analyze move method over HTTP. 
Args: @@ -1476,30 +1842,44 @@ def __call__(self, """ - http_options = _BaseAssetServiceRestTransport._BaseAnalyzeMove._get_http_options() + http_options = ( + _BaseAssetServiceRestTransport._BaseAnalyzeMove._get_http_options() + ) request, metadata = self._interceptor.pre_analyze_move(request, metadata) - transcoded_request = _BaseAssetServiceRestTransport._BaseAnalyzeMove._get_transcoded_request(http_options, request) + transcoded_request = ( + _BaseAssetServiceRestTransport._BaseAnalyzeMove._get_transcoded_request( + http_options, request + ) + ) # Jsonify the query params - query_params = _BaseAssetServiceRestTransport._BaseAnalyzeMove._get_query_params_json(transcoded_request) + query_params = ( + _BaseAssetServiceRestTransport._BaseAnalyzeMove._get_query_params_json( + transcoded_request + ) + ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.asset_v1.AssetServiceClient.AnalyzeMove", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "AnalyzeMove", "httpRequest": http_request, @@ -1508,7 +1888,14 @@ def __call__(self, ) # Send the request - response = AssetServiceRestTransport._AnalyzeMove._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = AssetServiceRestTransport._AnalyzeMove._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
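At the transport layer, AnalyzeIamPolicyLongrunning returns a raw operations_pb2.Operation; the client-level method wraps it in an operation future that polls through the google.longrunning.Operations.GetOperation mapping registered on the operations client above. A hedged usage sketch assuming application default credentials; the project, scope, and bucket names are placeholders:

    from google.cloud import asset_v1

    client = asset_v1.AssetServiceClient(transport="rest")

    request = asset_v1.AnalyzeIamPolicyLongrunningRequest(
        analysis_query=asset_v1.IamPolicyAnalysisQuery(
            scope="projects/my-project",  # placeholder
            resource_selector=asset_v1.IamPolicyAnalysisQuery.ResourceSelector(
                full_resource_name=(
                    "//cloudresourcemanager.googleapis.com/projects/my-project"
                ),
            ),
        ),
        output_config=asset_v1.IamPolicyAnalysisOutputConfig(
            gcs_destination=asset_v1.IamPolicyAnalysisOutputConfig.GcsDestination(
                uri="gs://my-bucket/iam-analysis.json",  # placeholder
            ),
        ),
    )

    # result() blocks while the operation is polled via GetOperation.
    operation = client.analyze_iam_policy_longrunning(request=request)
    response = operation.result(timeout=300)
    print(response)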
@@ -1523,20 +1910,26 @@ def __call__(self, resp = self._interceptor.post_analyze_move(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_analyze_move_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + resp, _ = self._interceptor.post_analyze_move_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: - response_payload = asset_service.AnalyzeMoveResponse.to_json(response) + response_payload = asset_service.AnalyzeMoveResponse.to_json( + response + ) except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.asset_v1.AssetServiceClient.analyze_move", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "AnalyzeMove", "metadata": http_response["headers"], @@ -1545,7 +1938,9 @@ def __call__(self, ) return resp - class _AnalyzeOrgPolicies(_BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicies, AssetServiceRestStub): + class _AnalyzeOrgPolicies( + _BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicies, AssetServiceRestStub + ): def __hash__(self): return hash("AssetServiceRestTransport.AnalyzeOrgPolicies") @@ -1557,26 +1952,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: asset_service.AnalyzeOrgPoliciesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> asset_service.AnalyzeOrgPoliciesResponse: + def __call__( + self, + request: asset_service.AnalyzeOrgPoliciesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.AnalyzeOrgPoliciesResponse: r"""Call the analyze org policies method over HTTP. 
Args: @@ -1598,30 +1995,42 @@ def __call__(self, """ - http_options = _BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicies._get_http_options() + http_options = ( + _BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicies._get_http_options() + ) - request, metadata = self._interceptor.pre_analyze_org_policies(request, metadata) - transcoded_request = _BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicies._get_transcoded_request(http_options, request) + request, metadata = self._interceptor.pre_analyze_org_policies( + request, metadata + ) + transcoded_request = _BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicies._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = _BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicies._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + query_params = _BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicies._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.asset_v1.AssetServiceClient.AnalyzeOrgPolicies", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "AnalyzeOrgPolicies", "httpRequest": http_request, @@ -1630,7 +2039,14 @@ def __call__(self, ) # Send the request - response = AssetServiceRestTransport._AnalyzeOrgPolicies._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = AssetServiceRestTransport._AnalyzeOrgPolicies._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
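The per-call metadata accepted by these __call__ methods is a sequence of key/value pairs; as the docstrings above note, values are strings unless the key ends in `-bin`, in which case bytes are required. A short sketch of calling analyze_org_policies through the public client, which is paginated in the public surface; the scope, constraint, and metadata key are placeholders:

    from google.cloud import asset_v1

    client = asset_v1.AssetServiceClient(transport="rest")

    # String values for ordinary keys; bytes for keys ending in "-bin".
    metadata = [("x-example-trace-bin", b"\x00\x01")]  # illustrative key

    pager = client.analyze_org_policies(
        request=asset_v1.AnalyzeOrgPoliciesRequest(
            scope="organizations/123456789",  # placeholder
            constraint="constraints/compute.requireOsLogin",
        ),
        metadata=metadata,
    )

    for result in pager:  # additional pages are fetched on demand
        print(result.consolidated_policy.attached_resource)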
@@ -1645,20 +2061,26 @@ def __call__(self, resp = self._interceptor.post_analyze_org_policies(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_analyze_org_policies_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + resp, _ = self._interceptor.post_analyze_org_policies_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: - response_payload = asset_service.AnalyzeOrgPoliciesResponse.to_json(response) + response_payload = asset_service.AnalyzeOrgPoliciesResponse.to_json( + response + ) except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.asset_v1.AssetServiceClient.analyze_org_policies", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "AnalyzeOrgPolicies", "metadata": http_response["headers"], @@ -1667,7 +2089,10 @@ def __call__(self, ) return resp - class _AnalyzeOrgPolicyGovernedAssets(_BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicyGovernedAssets, AssetServiceRestStub): + class _AnalyzeOrgPolicyGovernedAssets( + _BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicyGovernedAssets, + AssetServiceRestStub, + ): def __hash__(self): return hash("AssetServiceRestTransport.AnalyzeOrgPolicyGovernedAssets") @@ -1679,72 +2104,89 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: asset_service.AnalyzeOrgPolicyGovernedAssetsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> asset_service.AnalyzeOrgPolicyGovernedAssetsResponse: + def __call__( + self, + request: asset_service.AnalyzeOrgPolicyGovernedAssetsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.AnalyzeOrgPolicyGovernedAssetsResponse: r"""Call the analyze org policy - governed assets method over HTTP. - - Args: - request (~.asset_service.AnalyzeOrgPolicyGovernedAssetsRequest): - The request object. A request message for - [AssetService.AnalyzeOrgPolicyGovernedAssets][google.cloud.asset.v1.AssetService.AnalyzeOrgPolicyGovernedAssets]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- - Returns: - ~.asset_service.AnalyzeOrgPolicyGovernedAssetsResponse: - The response message for - [AssetService.AnalyzeOrgPolicyGovernedAssets][google.cloud.asset.v1.AssetService.AnalyzeOrgPolicyGovernedAssets]. + governed assets method over HTTP. + + Args: + request (~.asset_service.AnalyzeOrgPolicyGovernedAssetsRequest): + The request object. A request message for + [AssetService.AnalyzeOrgPolicyGovernedAssets][google.cloud.asset.v1.AssetService.AnalyzeOrgPolicyGovernedAssets]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.asset_service.AnalyzeOrgPolicyGovernedAssetsResponse: + The response message for + [AssetService.AnalyzeOrgPolicyGovernedAssets][google.cloud.asset.v1.AssetService.AnalyzeOrgPolicyGovernedAssets]. """ - http_options = _BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicyGovernedAssets._get_http_options() + http_options = ( + _BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicyGovernedAssets._get_http_options() + ) - request, metadata = self._interceptor.pre_analyze_org_policy_governed_assets(request, metadata) - transcoded_request = _BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicyGovernedAssets._get_transcoded_request(http_options, request) + ( + request, + metadata, + ) = self._interceptor.pre_analyze_org_policy_governed_assets( + request, metadata + ) + transcoded_request = _BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicyGovernedAssets._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = _BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicyGovernedAssets._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + query_params = _BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicyGovernedAssets._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.asset_v1.AssetServiceClient.AnalyzeOrgPolicyGovernedAssets", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "AnalyzeOrgPolicyGovernedAssets", "httpRequest": http_request, @@ -1753,7 +2195,16 @@ def __call__(self, ) # Send the request - response = AssetServiceRestTransport._AnalyzeOrgPolicyGovernedAssets._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = ( + AssetServiceRestTransport._AnalyzeOrgPolicyGovernedAssets._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, 
+ transcoded_request, + ) + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -1768,20 +2219,31 @@ def __call__(self, resp = self._interceptor.post_analyze_org_policy_governed_assets(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_analyze_org_policy_governed_assets_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + ( + resp, + _, + ) = self._interceptor.post_analyze_org_policy_governed_assets_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: - response_payload = asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.to_json(response) + response_payload = ( + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.to_json( + response + ) + ) except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.asset_v1.AssetServiceClient.analyze_org_policy_governed_assets", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "AnalyzeOrgPolicyGovernedAssets", "metadata": http_response["headers"], @@ -1790,7 +2252,10 @@ def __call__(self, ) return resp - class _AnalyzeOrgPolicyGovernedContainers(_BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicyGovernedContainers, AssetServiceRestStub): + class _AnalyzeOrgPolicyGovernedContainers( + _BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicyGovernedContainers, + AssetServiceRestStub, + ): def __hash__(self): return hash("AssetServiceRestTransport.AnalyzeOrgPolicyGovernedContainers") @@ -1802,72 +2267,89 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: asset_service.AnalyzeOrgPolicyGovernedContainersRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> asset_service.AnalyzeOrgPolicyGovernedContainersResponse: + def __call__( + self, + request: asset_service.AnalyzeOrgPolicyGovernedContainersRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.AnalyzeOrgPolicyGovernedContainersResponse: r"""Call the analyze org policy - governed containers method over HTTP. - - Args: - request (~.asset_service.AnalyzeOrgPolicyGovernedContainersRequest): - The request object. A request message for - [AssetService.AnalyzeOrgPolicyGovernedContainers][google.cloud.asset.v1.AssetService.AnalyzeOrgPolicyGovernedContainers]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. 
- timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.asset_service.AnalyzeOrgPolicyGovernedContainersResponse: - The response message for - [AssetService.AnalyzeOrgPolicyGovernedContainers][google.cloud.asset.v1.AssetService.AnalyzeOrgPolicyGovernedContainers]. + governed containers method over HTTP. + + Args: + request (~.asset_service.AnalyzeOrgPolicyGovernedContainersRequest): + The request object. A request message for + [AssetService.AnalyzeOrgPolicyGovernedContainers][google.cloud.asset.v1.AssetService.AnalyzeOrgPolicyGovernedContainers]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.asset_service.AnalyzeOrgPolicyGovernedContainersResponse: + The response message for + [AssetService.AnalyzeOrgPolicyGovernedContainers][google.cloud.asset.v1.AssetService.AnalyzeOrgPolicyGovernedContainers]. """ - http_options = _BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicyGovernedContainers._get_http_options() + http_options = ( + _BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicyGovernedContainers._get_http_options() + ) - request, metadata = self._interceptor.pre_analyze_org_policy_governed_containers(request, metadata) - transcoded_request = _BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicyGovernedContainers._get_transcoded_request(http_options, request) + ( + request, + metadata, + ) = self._interceptor.pre_analyze_org_policy_governed_containers( + request, metadata + ) + transcoded_request = _BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicyGovernedContainers._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = _BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicyGovernedContainers._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + query_params = _BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicyGovernedContainers._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.asset_v1.AssetServiceClient.AnalyzeOrgPolicyGovernedContainers", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "AnalyzeOrgPolicyGovernedContainers", 
"httpRequest": http_request, @@ -1876,7 +2358,14 @@ def __call__(self, ) # Send the request - response = AssetServiceRestTransport._AnalyzeOrgPolicyGovernedContainers._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = AssetServiceRestTransport._AnalyzeOrgPolicyGovernedContainers._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -1891,20 +2380,29 @@ def __call__(self, resp = self._interceptor.post_analyze_org_policy_governed_containers(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_analyze_org_policy_governed_containers_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + ( + resp, + _, + ) = self._interceptor.post_analyze_org_policy_governed_containers_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: - response_payload = asset_service.AnalyzeOrgPolicyGovernedContainersResponse.to_json(response) + response_payload = asset_service.AnalyzeOrgPolicyGovernedContainersResponse.to_json( + response + ) except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.asset_v1.AssetServiceClient.analyze_org_policy_governed_containers", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "AnalyzeOrgPolicyGovernedContainers", "metadata": http_response["headers"], @@ -1913,7 +2411,9 @@ def __call__(self, ) return resp - class _BatchGetAssetsHistory(_BaseAssetServiceRestTransport._BaseBatchGetAssetsHistory, AssetServiceRestStub): + class _BatchGetAssetsHistory( + _BaseAssetServiceRestTransport._BaseBatchGetAssetsHistory, AssetServiceRestStub + ): def __hash__(self): return hash("AssetServiceRestTransport.BatchGetAssetsHistory") @@ -1925,26 +2425,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: asset_service.BatchGetAssetsHistoryRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> asset_service.BatchGetAssetsHistoryResponse: + def __call__( + self, + request: asset_service.BatchGetAssetsHistoryRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.BatchGetAssetsHistoryResponse: r"""Call the batch get assets history method over HTTP. Args: @@ -1963,30 +2465,42 @@ def __call__(self, Batch get assets history response. 
""" - http_options = _BaseAssetServiceRestTransport._BaseBatchGetAssetsHistory._get_http_options() + http_options = ( + _BaseAssetServiceRestTransport._BaseBatchGetAssetsHistory._get_http_options() + ) - request, metadata = self._interceptor.pre_batch_get_assets_history(request, metadata) - transcoded_request = _BaseAssetServiceRestTransport._BaseBatchGetAssetsHistory._get_transcoded_request(http_options, request) + request, metadata = self._interceptor.pre_batch_get_assets_history( + request, metadata + ) + transcoded_request = _BaseAssetServiceRestTransport._BaseBatchGetAssetsHistory._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = _BaseAssetServiceRestTransport._BaseBatchGetAssetsHistory._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + query_params = _BaseAssetServiceRestTransport._BaseBatchGetAssetsHistory._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.asset_v1.AssetServiceClient.BatchGetAssetsHistory", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "BatchGetAssetsHistory", "httpRequest": http_request, @@ -1995,7 +2509,14 @@ def __call__(self, ) # Send the request - response = AssetServiceRestTransport._BatchGetAssetsHistory._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = AssetServiceRestTransport._BatchGetAssetsHistory._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -2010,20 +2531,26 @@ def __call__(self, resp = self._interceptor.post_batch_get_assets_history(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_batch_get_assets_history_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + resp, _ = self._interceptor.post_batch_get_assets_history_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: - response_payload = asset_service.BatchGetAssetsHistoryResponse.to_json(response) + response_payload = ( + asset_service.BatchGetAssetsHistoryResponse.to_json(response) + ) except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.asset_v1.AssetServiceClient.batch_get_assets_history", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "BatchGetAssetsHistory", "metadata": http_response["headers"], @@ -2032,7 +2559,10 @@ def __call__(self, ) return resp - class _BatchGetEffectiveIamPolicies(_BaseAssetServiceRestTransport._BaseBatchGetEffectiveIamPolicies, AssetServiceRestStub): + class _BatchGetEffectiveIamPolicies( + _BaseAssetServiceRestTransport._BaseBatchGetEffectiveIamPolicies, + AssetServiceRestStub, + ): def __hash__(self): return hash("AssetServiceRestTransport.BatchGetEffectiveIamPolicies") @@ -2044,72 +2574,86 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: asset_service.BatchGetEffectiveIamPoliciesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> asset_service.BatchGetEffectiveIamPoliciesResponse: + def __call__( + self, + request: asset_service.BatchGetEffectiveIamPoliciesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.BatchGetEffectiveIamPoliciesResponse: r"""Call the batch get effective iam - policies method over HTTP. - - Args: - request (~.asset_service.BatchGetEffectiveIamPoliciesRequest): - The request object. A request message for - [AssetService.BatchGetEffectiveIamPolicies][google.cloud.asset.v1.AssetService.BatchGetEffectiveIamPolicies]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- - Returns: - ~.asset_service.BatchGetEffectiveIamPoliciesResponse: - A response message for - [AssetService.BatchGetEffectiveIamPolicies][google.cloud.asset.v1.AssetService.BatchGetEffectiveIamPolicies]. + policies method over HTTP. + + Args: + request (~.asset_service.BatchGetEffectiveIamPoliciesRequest): + The request object. A request message for + [AssetService.BatchGetEffectiveIamPolicies][google.cloud.asset.v1.AssetService.BatchGetEffectiveIamPolicies]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.asset_service.BatchGetEffectiveIamPoliciesResponse: + A response message for + [AssetService.BatchGetEffectiveIamPolicies][google.cloud.asset.v1.AssetService.BatchGetEffectiveIamPolicies]. """ - http_options = _BaseAssetServiceRestTransport._BaseBatchGetEffectiveIamPolicies._get_http_options() + http_options = ( + _BaseAssetServiceRestTransport._BaseBatchGetEffectiveIamPolicies._get_http_options() + ) - request, metadata = self._interceptor.pre_batch_get_effective_iam_policies(request, metadata) - transcoded_request = _BaseAssetServiceRestTransport._BaseBatchGetEffectiveIamPolicies._get_transcoded_request(http_options, request) + request, metadata = self._interceptor.pre_batch_get_effective_iam_policies( + request, metadata + ) + transcoded_request = _BaseAssetServiceRestTransport._BaseBatchGetEffectiveIamPolicies._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = _BaseAssetServiceRestTransport._BaseBatchGetEffectiveIamPolicies._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + query_params = _BaseAssetServiceRestTransport._BaseBatchGetEffectiveIamPolicies._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.asset_v1.AssetServiceClient.BatchGetEffectiveIamPolicies", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "BatchGetEffectiveIamPolicies", "httpRequest": http_request, @@ -2118,7 +2662,16 @@ def __call__(self, ) # Send the request - response = AssetServiceRestTransport._BatchGetEffectiveIamPolicies._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = ( + AssetServiceRestTransport._BatchGetEffectiveIamPolicies._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + ) # In case of error, raise the 
appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -2133,20 +2686,31 @@ def __call__(self, resp = self._interceptor.post_batch_get_effective_iam_policies(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_batch_get_effective_iam_policies_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + ( + resp, + _, + ) = self._interceptor.post_batch_get_effective_iam_policies_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: - response_payload = asset_service.BatchGetEffectiveIamPoliciesResponse.to_json(response) + response_payload = ( + asset_service.BatchGetEffectiveIamPoliciesResponse.to_json( + response + ) + ) except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.asset_v1.AssetServiceClient.batch_get_effective_iam_policies", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "BatchGetEffectiveIamPolicies", "metadata": http_response["headers"], @@ -2155,7 +2719,9 @@ def __call__(self, ) return resp - class _CreateFeed(_BaseAssetServiceRestTransport._BaseCreateFeed, AssetServiceRestStub): + class _CreateFeed( + _BaseAssetServiceRestTransport._BaseCreateFeed, AssetServiceRestStub + ): def __hash__(self): return hash("AssetServiceRestTransport.CreateFeed") @@ -2167,27 +2733,29 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) return response - def __call__(self, - request: asset_service.CreateFeedRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> asset_service.Feed: + def __call__( + self, + request: asset_service.CreateFeedRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.Feed: r"""Call the create feed method over HTTP. 
Args: @@ -2214,32 +2782,50 @@ def __call__(self, """ - http_options = _BaseAssetServiceRestTransport._BaseCreateFeed._get_http_options() + http_options = ( + _BaseAssetServiceRestTransport._BaseCreateFeed._get_http_options() + ) request, metadata = self._interceptor.pre_create_feed(request, metadata) - transcoded_request = _BaseAssetServiceRestTransport._BaseCreateFeed._get_transcoded_request(http_options, request) + transcoded_request = ( + _BaseAssetServiceRestTransport._BaseCreateFeed._get_transcoded_request( + http_options, request + ) + ) - body = _BaseAssetServiceRestTransport._BaseCreateFeed._get_request_body_json(transcoded_request) + body = ( + _BaseAssetServiceRestTransport._BaseCreateFeed._get_request_body_json( + transcoded_request + ) + ) # Jsonify the query params - query_params = _BaseAssetServiceRestTransport._BaseCreateFeed._get_query_params_json(transcoded_request) + query_params = ( + _BaseAssetServiceRestTransport._BaseCreateFeed._get_query_params_json( + transcoded_request + ) + ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.asset_v1.AssetServiceClient.CreateFeed", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "CreateFeed", "httpRequest": http_request, @@ -2248,7 +2834,15 @@ def __call__(self, ) # Send the request - response = AssetServiceRestTransport._CreateFeed._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + response = AssetServiceRestTransport._CreateFeed._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -2263,20 +2857,24 @@ def __call__(self, resp = self._interceptor.post_create_feed(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_create_feed_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + resp, _ = self._interceptor.post_create_feed_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: response_payload = asset_service.Feed.to_json(response) except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.asset_v1.AssetServiceClient.create_feed", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "CreateFeed", "metadata": http_response["headers"], @@ -2285,7 +2883,9 @@ def __call__(self, ) return resp - class _CreateSavedQuery(_BaseAssetServiceRestTransport._BaseCreateSavedQuery, AssetServiceRestStub): + class _CreateSavedQuery( + _BaseAssetServiceRestTransport._BaseCreateSavedQuery, AssetServiceRestStub + ): def __hash__(self): return hash("AssetServiceRestTransport.CreateSavedQuery") @@ -2297,27 +2897,29 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) return response - def __call__(self, - request: asset_service.CreateSavedQueryRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> asset_service.SavedQuery: + def __call__( + self, + request: asset_service.CreateSavedQueryRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.SavedQuery: r"""Call the create saved query method over HTTP. 
Args: @@ -2338,32 +2940,46 @@ def __call__(self, """ - http_options = _BaseAssetServiceRestTransport._BaseCreateSavedQuery._get_http_options() + http_options = ( + _BaseAssetServiceRestTransport._BaseCreateSavedQuery._get_http_options() + ) - request, metadata = self._interceptor.pre_create_saved_query(request, metadata) - transcoded_request = _BaseAssetServiceRestTransport._BaseCreateSavedQuery._get_transcoded_request(http_options, request) + request, metadata = self._interceptor.pre_create_saved_query( + request, metadata + ) + transcoded_request = _BaseAssetServiceRestTransport._BaseCreateSavedQuery._get_transcoded_request( + http_options, request + ) - body = _BaseAssetServiceRestTransport._BaseCreateSavedQuery._get_request_body_json(transcoded_request) + body = _BaseAssetServiceRestTransport._BaseCreateSavedQuery._get_request_body_json( + transcoded_request + ) # Jsonify the query params - query_params = _BaseAssetServiceRestTransport._BaseCreateSavedQuery._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + query_params = _BaseAssetServiceRestTransport._BaseCreateSavedQuery._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.asset_v1.AssetServiceClient.CreateSavedQuery", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "CreateSavedQuery", "httpRequest": http_request, @@ -2372,7 +2988,15 @@ def __call__(self, ) # Send the request - response = AssetServiceRestTransport._CreateSavedQuery._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + response = AssetServiceRestTransport._CreateSavedQuery._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -2387,20 +3011,24 @@ def __call__(self, resp = self._interceptor.post_create_saved_query(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_create_saved_query_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + resp, _ = self._interceptor.post_create_saved_query_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: response_payload = asset_service.SavedQuery.to_json(response) except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.asset_v1.AssetServiceClient.create_saved_query", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "CreateSavedQuery", "metadata": http_response["headers"], @@ -2409,7 +3037,9 @@ def __call__(self, ) return resp - class _DeleteFeed(_BaseAssetServiceRestTransport._BaseDeleteFeed, AssetServiceRestStub): + class _DeleteFeed( + _BaseAssetServiceRestTransport._BaseDeleteFeed, AssetServiceRestStub + ): def __hash__(self): return hash("AssetServiceRestTransport.DeleteFeed") @@ -2421,26 +3051,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: asset_service.DeleteFeedRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ): + def __call__( + self, + request: asset_service.DeleteFeedRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): r"""Call the delete feed method over HTTP. Args: @@ -2455,30 +3087,44 @@ def __call__(self, be of type `bytes`. 
""" - http_options = _BaseAssetServiceRestTransport._BaseDeleteFeed._get_http_options() + http_options = ( + _BaseAssetServiceRestTransport._BaseDeleteFeed._get_http_options() + ) request, metadata = self._interceptor.pre_delete_feed(request, metadata) - transcoded_request = _BaseAssetServiceRestTransport._BaseDeleteFeed._get_transcoded_request(http_options, request) + transcoded_request = ( + _BaseAssetServiceRestTransport._BaseDeleteFeed._get_transcoded_request( + http_options, request + ) + ) # Jsonify the query params - query_params = _BaseAssetServiceRestTransport._BaseDeleteFeed._get_query_params_json(transcoded_request) + query_params = ( + _BaseAssetServiceRestTransport._BaseDeleteFeed._get_query_params_json( + transcoded_request + ) + ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.asset_v1.AssetServiceClient.DeleteFeed", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "DeleteFeed", "httpRequest": http_request, @@ -2487,14 +3133,23 @@ def __call__(self, ) # Send the request - response = AssetServiceRestTransport._DeleteFeed._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = AssetServiceRestTransport._DeleteFeed._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
if response.status_code >= 400: raise core_exceptions.from_http_response(response) - class _DeleteSavedQuery(_BaseAssetServiceRestTransport._BaseDeleteSavedQuery, AssetServiceRestStub): + class _DeleteSavedQuery( + _BaseAssetServiceRestTransport._BaseDeleteSavedQuery, AssetServiceRestStub + ): def __hash__(self): return hash("AssetServiceRestTransport.DeleteSavedQuery") @@ -2506,26 +3161,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: asset_service.DeleteSavedQueryRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ): + def __call__( + self, + request: asset_service.DeleteSavedQueryRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): r"""Call the delete saved query method over HTTP. Args: @@ -2540,30 +3197,42 @@ def __call__(self, be of type `bytes`. """ - http_options = _BaseAssetServiceRestTransport._BaseDeleteSavedQuery._get_http_options() + http_options = ( + _BaseAssetServiceRestTransport._BaseDeleteSavedQuery._get_http_options() + ) - request, metadata = self._interceptor.pre_delete_saved_query(request, metadata) - transcoded_request = _BaseAssetServiceRestTransport._BaseDeleteSavedQuery._get_transcoded_request(http_options, request) + request, metadata = self._interceptor.pre_delete_saved_query( + request, metadata + ) + transcoded_request = _BaseAssetServiceRestTransport._BaseDeleteSavedQuery._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = _BaseAssetServiceRestTransport._BaseDeleteSavedQuery._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + query_params = _BaseAssetServiceRestTransport._BaseDeleteSavedQuery._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.asset_v1.AssetServiceClient.DeleteSavedQuery", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "DeleteSavedQuery", "httpRequest": http_request, @@ -2572,14 +3241,23 @@ def __call__(self, ) # Send the request - response = 
AssetServiceRestTransport._DeleteSavedQuery._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = AssetServiceRestTransport._DeleteSavedQuery._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. if response.status_code >= 400: raise core_exceptions.from_http_response(response) - class _ExportAssets(_BaseAssetServiceRestTransport._BaseExportAssets, AssetServiceRestStub): + class _ExportAssets( + _BaseAssetServiceRestTransport._BaseExportAssets, AssetServiceRestStub + ): def __hash__(self): return hash("AssetServiceRestTransport.ExportAssets") @@ -2591,27 +3269,29 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) return response - def __call__(self, - request: asset_service.ExportAssetsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: + def __call__( + self, + request: asset_service.ExportAssetsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: r"""Call the export assets method over HTTP. 
Args: @@ -2633,32 +3313,48 @@ def __call__(self, """ - http_options = _BaseAssetServiceRestTransport._BaseExportAssets._get_http_options() + http_options = ( + _BaseAssetServiceRestTransport._BaseExportAssets._get_http_options() + ) request, metadata = self._interceptor.pre_export_assets(request, metadata) - transcoded_request = _BaseAssetServiceRestTransport._BaseExportAssets._get_transcoded_request(http_options, request) + transcoded_request = _BaseAssetServiceRestTransport._BaseExportAssets._get_transcoded_request( + http_options, request + ) - body = _BaseAssetServiceRestTransport._BaseExportAssets._get_request_body_json(transcoded_request) + body = ( + _BaseAssetServiceRestTransport._BaseExportAssets._get_request_body_json( + transcoded_request + ) + ) # Jsonify the query params - query_params = _BaseAssetServiceRestTransport._BaseExportAssets._get_query_params_json(transcoded_request) + query_params = ( + _BaseAssetServiceRestTransport._BaseExportAssets._get_query_params_json( + transcoded_request + ) + ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.asset_v1.AssetServiceClient.ExportAssets", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "ExportAssets", "httpRequest": http_request, @@ -2667,7 +3363,15 @@ def __call__(self, ) # Send the request - response = AssetServiceRestTransport._ExportAssets._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + response = AssetServiceRestTransport._ExportAssets._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -2680,20 +3384,24 @@ def __call__(self, resp = self._interceptor.post_export_assets(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_export_assets_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + resp, _ = self._interceptor.post_export_assets_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: response_payload = json_format.MessageToJson(resp) except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.asset_v1.AssetServiceClient.export_assets", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "ExportAssets", "metadata": http_response["headers"], @@ -2714,26 +3422,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: asset_service.GetFeedRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> asset_service.Feed: + def __call__( + self, + request: asset_service.GetFeedRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.Feed: r"""Call the get feed method over HTTP. 
Args: @@ -2760,30 +3470,44 @@ def __call__(self, """ - http_options = _BaseAssetServiceRestTransport._BaseGetFeed._get_http_options() + http_options = ( + _BaseAssetServiceRestTransport._BaseGetFeed._get_http_options() + ) request, metadata = self._interceptor.pre_get_feed(request, metadata) - transcoded_request = _BaseAssetServiceRestTransport._BaseGetFeed._get_transcoded_request(http_options, request) + transcoded_request = ( + _BaseAssetServiceRestTransport._BaseGetFeed._get_transcoded_request( + http_options, request + ) + ) # Jsonify the query params - query_params = _BaseAssetServiceRestTransport._BaseGetFeed._get_query_params_json(transcoded_request) + query_params = ( + _BaseAssetServiceRestTransport._BaseGetFeed._get_query_params_json( + transcoded_request + ) + ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.asset_v1.AssetServiceClient.GetFeed", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "GetFeed", "httpRequest": http_request, @@ -2792,7 +3516,14 @@ def __call__(self, ) # Send the request - response = AssetServiceRestTransport._GetFeed._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = AssetServiceRestTransport._GetFeed._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -2807,20 +3538,24 @@ def __call__(self, resp = self._interceptor.post_get_feed(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_feed_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + resp, _ = self._interceptor.post_get_feed_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: response_payload = asset_service.Feed.to_json(response) except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.asset_v1.AssetServiceClient.get_feed", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "GetFeed", "metadata": http_response["headers"], @@ -2829,7 +3564,9 @@ def __call__(self, ) return resp - class _GetSavedQuery(_BaseAssetServiceRestTransport._BaseGetSavedQuery, AssetServiceRestStub): + class _GetSavedQuery( + _BaseAssetServiceRestTransport._BaseGetSavedQuery, AssetServiceRestStub + ): def __hash__(self): return hash("AssetServiceRestTransport.GetSavedQuery") @@ -2841,26 +3578,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: asset_service.GetSavedQueryRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> asset_service.SavedQuery: + def __call__( + self, + request: asset_service.GetSavedQueryRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.SavedQuery: r"""Call the get saved query method over HTTP. 
Args: @@ -2881,30 +3620,40 @@ def __call__(self, """ - http_options = _BaseAssetServiceRestTransport._BaseGetSavedQuery._get_http_options() + http_options = ( + _BaseAssetServiceRestTransport._BaseGetSavedQuery._get_http_options() + ) request, metadata = self._interceptor.pre_get_saved_query(request, metadata) - transcoded_request = _BaseAssetServiceRestTransport._BaseGetSavedQuery._get_transcoded_request(http_options, request) + transcoded_request = _BaseAssetServiceRestTransport._BaseGetSavedQuery._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = _BaseAssetServiceRestTransport._BaseGetSavedQuery._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + query_params = _BaseAssetServiceRestTransport._BaseGetSavedQuery._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.asset_v1.AssetServiceClient.GetSavedQuery", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "GetSavedQuery", "httpRequest": http_request, @@ -2913,7 +3662,14 @@ def __call__(self, ) # Send the request - response = AssetServiceRestTransport._GetSavedQuery._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = AssetServiceRestTransport._GetSavedQuery._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -2928,20 +3684,24 @@ def __call__(self, resp = self._interceptor.post_get_saved_query(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_saved_query_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + resp, _ = self._interceptor.post_get_saved_query_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: response_payload = asset_service.SavedQuery.to_json(response) except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.asset_v1.AssetServiceClient.get_saved_query", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "GetSavedQuery", "metadata": http_response["headers"], @@ -2950,7 +3710,9 @@ def __call__(self, ) return resp - class _ListAssets(_BaseAssetServiceRestTransport._BaseListAssets, AssetServiceRestStub): + class _ListAssets( + _BaseAssetServiceRestTransport._BaseListAssets, AssetServiceRestStub + ): def __hash__(self): return hash("AssetServiceRestTransport.ListAssets") @@ -2962,26 +3724,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: asset_service.ListAssetsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> asset_service.ListAssetsResponse: + def __call__( + self, + request: asset_service.ListAssetsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.ListAssetsResponse: r"""Call the list assets method over HTTP. Args: @@ -3000,30 +3764,44 @@ def __call__(self, ListAssets response. 
""" - http_options = _BaseAssetServiceRestTransport._BaseListAssets._get_http_options() + http_options = ( + _BaseAssetServiceRestTransport._BaseListAssets._get_http_options() + ) request, metadata = self._interceptor.pre_list_assets(request, metadata) - transcoded_request = _BaseAssetServiceRestTransport._BaseListAssets._get_transcoded_request(http_options, request) + transcoded_request = ( + _BaseAssetServiceRestTransport._BaseListAssets._get_transcoded_request( + http_options, request + ) + ) # Jsonify the query params - query_params = _BaseAssetServiceRestTransport._BaseListAssets._get_query_params_json(transcoded_request) + query_params = ( + _BaseAssetServiceRestTransport._BaseListAssets._get_query_params_json( + transcoded_request + ) + ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.asset_v1.AssetServiceClient.ListAssets", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "ListAssets", "httpRequest": http_request, @@ -3032,7 +3810,14 @@ def __call__(self, ) # Send the request - response = AssetServiceRestTransport._ListAssets._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = AssetServiceRestTransport._ListAssets._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -3047,20 +3832,26 @@ def __call__(self, resp = self._interceptor.post_list_assets(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_assets_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + resp, _ = self._interceptor.post_list_assets_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: - response_payload = asset_service.ListAssetsResponse.to_json(response) + response_payload = asset_service.ListAssetsResponse.to_json( + response + ) except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.asset_v1.AssetServiceClient.list_assets", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "ListAssets", "metadata": http_response["headers"], @@ -3069,7 +3860,9 @@ def __call__(self, ) return resp - class _ListFeeds(_BaseAssetServiceRestTransport._BaseListFeeds, AssetServiceRestStub): + class _ListFeeds( + _BaseAssetServiceRestTransport._BaseListFeeds, AssetServiceRestStub + ): def __hash__(self): return hash("AssetServiceRestTransport.ListFeeds") @@ -3081,26 +3874,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: asset_service.ListFeedsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> asset_service.ListFeedsResponse: + def __call__( + self, + request: asset_service.ListFeedsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.ListFeedsResponse: r"""Call the list feeds method over HTTP. 
Args: @@ -3119,30 +3914,44 @@ def __call__(self, """ - http_options = _BaseAssetServiceRestTransport._BaseListFeeds._get_http_options() + http_options = ( + _BaseAssetServiceRestTransport._BaseListFeeds._get_http_options() + ) request, metadata = self._interceptor.pre_list_feeds(request, metadata) - transcoded_request = _BaseAssetServiceRestTransport._BaseListFeeds._get_transcoded_request(http_options, request) + transcoded_request = ( + _BaseAssetServiceRestTransport._BaseListFeeds._get_transcoded_request( + http_options, request + ) + ) # Jsonify the query params - query_params = _BaseAssetServiceRestTransport._BaseListFeeds._get_query_params_json(transcoded_request) + query_params = ( + _BaseAssetServiceRestTransport._BaseListFeeds._get_query_params_json( + transcoded_request + ) + ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.asset_v1.AssetServiceClient.ListFeeds", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "ListFeeds", "httpRequest": http_request, @@ -3151,7 +3960,14 @@ def __call__(self, ) # Send the request - response = AssetServiceRestTransport._ListFeeds._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = AssetServiceRestTransport._ListFeeds._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
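The pre_list_feeds/post_list_feeds calls above are interceptor hooks. A hedged sketch of customizing them, assuming the generated base class is named AssetServiceRestInterceptor (the subclass and the print statement are illustrative only):

    from google.cloud.asset_v1.services.asset_service.transports.rest import (
        AssetServiceRestInterceptor,  # assumed name of the generated base class
        AssetServiceRestTransport,
    )

    class LoggingInterceptor(AssetServiceRestInterceptor):
        def pre_list_feeds(self, request, metadata):
            # Inspect or rewrite the request and metadata before transcoding.
            print(f"ListFeeds parent={request.parent}")
            return request, metadata

    # transport = AssetServiceRestTransport(interceptor=LoggingInterceptor())
    # client = asset_v1.AssetServiceClient(transport=transport)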
@@ -3166,20 +3982,24 @@ def __call__(self, resp = self._interceptor.post_list_feeds(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_feeds_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + resp, _ = self._interceptor.post_list_feeds_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: response_payload = asset_service.ListFeedsResponse.to_json(response) except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.asset_v1.AssetServiceClient.list_feeds", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "ListFeeds", "metadata": http_response["headers"], @@ -3188,7 +4008,9 @@ def __call__(self, ) return resp - class _ListSavedQueries(_BaseAssetServiceRestTransport._BaseListSavedQueries, AssetServiceRestStub): + class _ListSavedQueries( + _BaseAssetServiceRestTransport._BaseListSavedQueries, AssetServiceRestStub + ): def __hash__(self): return hash("AssetServiceRestTransport.ListSavedQueries") @@ -3200,26 +4022,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: asset_service.ListSavedQueriesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> asset_service.ListSavedQueriesResponse: + def __call__( + self, + request: asset_service.ListSavedQueriesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.ListSavedQueriesResponse: r"""Call the list saved queries method over HTTP. Args: @@ -3238,30 +4062,42 @@ def __call__(self, Response of listing saved queries. 
""" - http_options = _BaseAssetServiceRestTransport._BaseListSavedQueries._get_http_options() + http_options = ( + _BaseAssetServiceRestTransport._BaseListSavedQueries._get_http_options() + ) - request, metadata = self._interceptor.pre_list_saved_queries(request, metadata) - transcoded_request = _BaseAssetServiceRestTransport._BaseListSavedQueries._get_transcoded_request(http_options, request) + request, metadata = self._interceptor.pre_list_saved_queries( + request, metadata + ) + transcoded_request = _BaseAssetServiceRestTransport._BaseListSavedQueries._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = _BaseAssetServiceRestTransport._BaseListSavedQueries._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + query_params = _BaseAssetServiceRestTransport._BaseListSavedQueries._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.asset_v1.AssetServiceClient.ListSavedQueries", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "ListSavedQueries", "httpRequest": http_request, @@ -3270,7 +4106,14 @@ def __call__(self, ) # Send the request - response = AssetServiceRestTransport._ListSavedQueries._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = AssetServiceRestTransport._ListSavedQueries._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -3285,20 +4128,26 @@ def __call__(self, resp = self._interceptor.post_list_saved_queries(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_saved_queries_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + resp, _ = self._interceptor.post_list_saved_queries_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: - response_payload = asset_service.ListSavedQueriesResponse.to_json(response) + response_payload = asset_service.ListSavedQueriesResponse.to_json( + response + ) except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.asset_v1.AssetServiceClient.list_saved_queries", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "ListSavedQueries", "metadata": http_response["headers"], @@ -3307,7 +4156,9 @@ def __call__(self, ) return resp - class _QueryAssets(_BaseAssetServiceRestTransport._BaseQueryAssets, AssetServiceRestStub): + class _QueryAssets( + _BaseAssetServiceRestTransport._BaseQueryAssets, AssetServiceRestStub + ): def __hash__(self): return hash("AssetServiceRestTransport.QueryAssets") @@ -3319,27 +4170,29 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) return response - def __call__(self, - request: asset_service.QueryAssetsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> asset_service.QueryAssetsResponse: + def __call__( + self, + request: asset_service.QueryAssetsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.QueryAssetsResponse: r"""Call the query assets method over HTTP. Args: @@ -3358,32 +4211,50 @@ def __call__(self, QueryAssets response. 
""" - http_options = _BaseAssetServiceRestTransport._BaseQueryAssets._get_http_options() + http_options = ( + _BaseAssetServiceRestTransport._BaseQueryAssets._get_http_options() + ) request, metadata = self._interceptor.pre_query_assets(request, metadata) - transcoded_request = _BaseAssetServiceRestTransport._BaseQueryAssets._get_transcoded_request(http_options, request) + transcoded_request = ( + _BaseAssetServiceRestTransport._BaseQueryAssets._get_transcoded_request( + http_options, request + ) + ) - body = _BaseAssetServiceRestTransport._BaseQueryAssets._get_request_body_json(transcoded_request) + body = ( + _BaseAssetServiceRestTransport._BaseQueryAssets._get_request_body_json( + transcoded_request + ) + ) # Jsonify the query params - query_params = _BaseAssetServiceRestTransport._BaseQueryAssets._get_query_params_json(transcoded_request) + query_params = ( + _BaseAssetServiceRestTransport._BaseQueryAssets._get_query_params_json( + transcoded_request + ) + ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.asset_v1.AssetServiceClient.QueryAssets", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "QueryAssets", "httpRequest": http_request, @@ -3392,7 +4263,15 @@ def __call__(self, ) # Send the request - response = AssetServiceRestTransport._QueryAssets._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + response = AssetServiceRestTransport._QueryAssets._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -3407,20 +4286,26 @@ def __call__(self, resp = self._interceptor.post_query_assets(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_query_assets_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + resp, _ = self._interceptor.post_query_assets_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: - response_payload = asset_service.QueryAssetsResponse.to_json(response) + response_payload = asset_service.QueryAssetsResponse.to_json( + response + ) except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.asset_v1.AssetServiceClient.query_assets", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "QueryAssets", "metadata": http_response["headers"], @@ -3429,7 +4314,9 @@ def __call__(self, ) return resp - class _SearchAllIamPolicies(_BaseAssetServiceRestTransport._BaseSearchAllIamPolicies, AssetServiceRestStub): + class _SearchAllIamPolicies( + _BaseAssetServiceRestTransport._BaseSearchAllIamPolicies, AssetServiceRestStub + ): def __hash__(self): return hash("AssetServiceRestTransport.SearchAllIamPolicies") @@ -3441,26 +4328,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: asset_service.SearchAllIamPoliciesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> asset_service.SearchAllIamPoliciesResponse: + def __call__( + self, + request: asset_service.SearchAllIamPoliciesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.SearchAllIamPoliciesResponse: r"""Call the search all iam policies method over HTTP. Args: @@ -3479,30 +4368,42 @@ def __call__(self, Search all IAM policies response. 
""" - http_options = _BaseAssetServiceRestTransport._BaseSearchAllIamPolicies._get_http_options() + http_options = ( + _BaseAssetServiceRestTransport._BaseSearchAllIamPolicies._get_http_options() + ) - request, metadata = self._interceptor.pre_search_all_iam_policies(request, metadata) - transcoded_request = _BaseAssetServiceRestTransport._BaseSearchAllIamPolicies._get_transcoded_request(http_options, request) + request, metadata = self._interceptor.pre_search_all_iam_policies( + request, metadata + ) + transcoded_request = _BaseAssetServiceRestTransport._BaseSearchAllIamPolicies._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = _BaseAssetServiceRestTransport._BaseSearchAllIamPolicies._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + query_params = _BaseAssetServiceRestTransport._BaseSearchAllIamPolicies._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.asset_v1.AssetServiceClient.SearchAllIamPolicies", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "SearchAllIamPolicies", "httpRequest": http_request, @@ -3511,7 +4412,14 @@ def __call__(self, ) # Send the request - response = AssetServiceRestTransport._SearchAllIamPolicies._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = AssetServiceRestTransport._SearchAllIamPolicies._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -3526,20 +4434,26 @@ def __call__(self, resp = self._interceptor.post_search_all_iam_policies(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_search_all_iam_policies_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + resp, _ = self._interceptor.post_search_all_iam_policies_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: - response_payload = asset_service.SearchAllIamPoliciesResponse.to_json(response) + response_payload = ( + asset_service.SearchAllIamPoliciesResponse.to_json(response) + ) except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.asset_v1.AssetServiceClient.search_all_iam_policies", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "SearchAllIamPolicies", "metadata": http_response["headers"], @@ -3548,7 +4462,9 @@ def __call__(self, ) return resp - class _SearchAllResources(_BaseAssetServiceRestTransport._BaseSearchAllResources, AssetServiceRestStub): + class _SearchAllResources( + _BaseAssetServiceRestTransport._BaseSearchAllResources, AssetServiceRestStub + ): def __hash__(self): return hash("AssetServiceRestTransport.SearchAllResources") @@ -3560,26 +4476,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: asset_service.SearchAllResourcesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> asset_service.SearchAllResourcesResponse: + def __call__( + self, + request: asset_service.SearchAllResourcesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.SearchAllResourcesResponse: r"""Call the search all resources method over HTTP. Args: @@ -3598,30 +4516,42 @@ def __call__(self, Search all resources response. 
""" - http_options = _BaseAssetServiceRestTransport._BaseSearchAllResources._get_http_options() + http_options = ( + _BaseAssetServiceRestTransport._BaseSearchAllResources._get_http_options() + ) - request, metadata = self._interceptor.pre_search_all_resources(request, metadata) - transcoded_request = _BaseAssetServiceRestTransport._BaseSearchAllResources._get_transcoded_request(http_options, request) + request, metadata = self._interceptor.pre_search_all_resources( + request, metadata + ) + transcoded_request = _BaseAssetServiceRestTransport._BaseSearchAllResources._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = _BaseAssetServiceRestTransport._BaseSearchAllResources._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + query_params = _BaseAssetServiceRestTransport._BaseSearchAllResources._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.asset_v1.AssetServiceClient.SearchAllResources", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "SearchAllResources", "httpRequest": http_request, @@ -3630,7 +4560,14 @@ def __call__(self, ) # Send the request - response = AssetServiceRestTransport._SearchAllResources._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = AssetServiceRestTransport._SearchAllResources._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -3645,20 +4582,26 @@ def __call__(self, resp = self._interceptor.post_search_all_resources(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_search_all_resources_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + resp, _ = self._interceptor.post_search_all_resources_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: - response_payload = asset_service.SearchAllResourcesResponse.to_json(response) + response_payload = asset_service.SearchAllResourcesResponse.to_json( + response + ) except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.asset_v1.AssetServiceClient.search_all_resources", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "SearchAllResources", "metadata": http_response["headers"], @@ -3667,7 +4610,9 @@ def __call__(self, ) return resp - class _UpdateFeed(_BaseAssetServiceRestTransport._BaseUpdateFeed, AssetServiceRestStub): + class _UpdateFeed( + _BaseAssetServiceRestTransport._BaseUpdateFeed, AssetServiceRestStub + ): def __hash__(self): return hash("AssetServiceRestTransport.UpdateFeed") @@ -3679,27 +4624,29 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) return response - def __call__(self, - request: asset_service.UpdateFeedRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> asset_service.Feed: + def __call__( + self, + request: asset_service.UpdateFeedRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.Feed: r"""Call the update feed method over HTTP. 
Args: @@ -3726,32 +4673,50 @@ def __call__(self, """ - http_options = _BaseAssetServiceRestTransport._BaseUpdateFeed._get_http_options() + http_options = ( + _BaseAssetServiceRestTransport._BaseUpdateFeed._get_http_options() + ) request, metadata = self._interceptor.pre_update_feed(request, metadata) - transcoded_request = _BaseAssetServiceRestTransport._BaseUpdateFeed._get_transcoded_request(http_options, request) + transcoded_request = ( + _BaseAssetServiceRestTransport._BaseUpdateFeed._get_transcoded_request( + http_options, request + ) + ) - body = _BaseAssetServiceRestTransport._BaseUpdateFeed._get_request_body_json(transcoded_request) + body = ( + _BaseAssetServiceRestTransport._BaseUpdateFeed._get_request_body_json( + transcoded_request + ) + ) # Jsonify the query params - query_params = _BaseAssetServiceRestTransport._BaseUpdateFeed._get_query_params_json(transcoded_request) + query_params = ( + _BaseAssetServiceRestTransport._BaseUpdateFeed._get_query_params_json( + transcoded_request + ) + ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.asset_v1.AssetServiceClient.UpdateFeed", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "UpdateFeed", "httpRequest": http_request, @@ -3760,7 +4725,15 @@ def __call__(self, ) # Send the request - response = AssetServiceRestTransport._UpdateFeed._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + response = AssetServiceRestTransport._UpdateFeed._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
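UpdateFeed is an update-style RPC, so the request body built above carries the resource plus a FieldMask. A hedged request sketch, where the feed name is hypothetical:

    from google.cloud import asset_v1
    from google.protobuf import field_mask_pb2

    request = asset_v1.UpdateFeedRequest(
        feed=asset_v1.Feed(
            name="projects/123/feeds/my-feed",
            content_type=asset_v1.ContentType.RESOURCE,
        ),
        # Only the fields named in update_mask are changed by the request.
        update_mask=field_mask_pb2.FieldMask(paths=["content_type"]),
    )
    # client.update_feed(request=request)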
@@ -3775,20 +4748,24 @@ def __call__(self, resp = self._interceptor.post_update_feed(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_update_feed_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + resp, _ = self._interceptor.post_update_feed_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: response_payload = asset_service.Feed.to_json(response) except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.asset_v1.AssetServiceClient.update_feed", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "UpdateFeed", "metadata": http_response["headers"], @@ -3797,7 +4774,9 @@ def __call__(self, ) return resp - class _UpdateSavedQuery(_BaseAssetServiceRestTransport._BaseUpdateSavedQuery, AssetServiceRestStub): + class _UpdateSavedQuery( + _BaseAssetServiceRestTransport._BaseUpdateSavedQuery, AssetServiceRestStub + ): def __hash__(self): return hash("AssetServiceRestTransport.UpdateSavedQuery") @@ -3809,27 +4788,29 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) return response - def __call__(self, - request: asset_service.UpdateSavedQueryRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> asset_service.SavedQuery: + def __call__( + self, + request: asset_service.UpdateSavedQueryRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.SavedQuery: r"""Call the update saved query method over HTTP. 
Args: @@ -3850,32 +4831,46 @@ def __call__(self, """ - http_options = _BaseAssetServiceRestTransport._BaseUpdateSavedQuery._get_http_options() + http_options = ( + _BaseAssetServiceRestTransport._BaseUpdateSavedQuery._get_http_options() + ) - request, metadata = self._interceptor.pre_update_saved_query(request, metadata) - transcoded_request = _BaseAssetServiceRestTransport._BaseUpdateSavedQuery._get_transcoded_request(http_options, request) + request, metadata = self._interceptor.pre_update_saved_query( + request, metadata + ) + transcoded_request = _BaseAssetServiceRestTransport._BaseUpdateSavedQuery._get_transcoded_request( + http_options, request + ) - body = _BaseAssetServiceRestTransport._BaseUpdateSavedQuery._get_request_body_json(transcoded_request) + body = _BaseAssetServiceRestTransport._BaseUpdateSavedQuery._get_request_body_json( + transcoded_request + ) # Jsonify the query params - query_params = _BaseAssetServiceRestTransport._BaseUpdateSavedQuery._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + query_params = _BaseAssetServiceRestTransport._BaseUpdateSavedQuery._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.asset_v1.AssetServiceClient.UpdateSavedQuery", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "UpdateSavedQuery", "httpRequest": http_request, @@ -3884,7 +4879,15 @@ def __call__(self, ) # Send the request - response = AssetServiceRestTransport._UpdateSavedQuery._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + response = AssetServiceRestTransport._UpdateSavedQuery._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
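The hunks that follow reformat the transport's per-RPC properties. Each property simply returns the matching stub instance, so the transport can also be driven directly with a request object, as in this hedged sketch (the parent value is a placeholder):

    from google.cloud.asset_v1.services.asset_service.transports.rest import (
        AssetServiceRestTransport,
    )
    from google.cloud.asset_v1.types import asset_service

    # Resolves Application Default Credentials; pass credentials=... explicitly if needed.
    transport = AssetServiceRestTransport()
    list_feeds = transport.list_feeds  # property -> _ListFeeds stub instance
    response = list_feeds(asset_service.ListFeedsRequest(parent="projects/123"))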
@@ -3899,20 +4902,24 @@ def __call__(self, resp = self._interceptor.post_update_saved_query(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_update_saved_query_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + resp, _ = self._interceptor.post_update_saved_query_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: response_payload = asset_service.SavedQuery.to_json(response) except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.asset_v1.AssetServiceClient.update_saved_query", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "UpdateSavedQuery", "metadata": http_response["headers"], @@ -3922,194 +4929,225 @@ def __call__(self, return resp @property - def analyze_iam_policy(self) -> Callable[ - [asset_service.AnalyzeIamPolicyRequest], - asset_service.AnalyzeIamPolicyResponse]: + def analyze_iam_policy( + self, + ) -> Callable[ + [asset_service.AnalyzeIamPolicyRequest], asset_service.AnalyzeIamPolicyResponse + ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._AnalyzeIamPolicy(self._session, self._host, self._interceptor) # type: ignore + return self._AnalyzeIamPolicy(self._session, self._host, self._interceptor) # type: ignore @property - def analyze_iam_policy_longrunning(self) -> Callable[ - [asset_service.AnalyzeIamPolicyLongrunningRequest], - operations_pb2.Operation]: + def analyze_iam_policy_longrunning( + self, + ) -> Callable[ + [asset_service.AnalyzeIamPolicyLongrunningRequest], operations_pb2.Operation + ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._AnalyzeIamPolicyLongrunning(self._session, self._host, self._interceptor) # type: ignore + return self._AnalyzeIamPolicyLongrunning(self._session, self._host, self._interceptor) # type: ignore @property - def analyze_move(self) -> Callable[ - [asset_service.AnalyzeMoveRequest], - asset_service.AnalyzeMoveResponse]: + def analyze_move( + self, + ) -> Callable[ + [asset_service.AnalyzeMoveRequest], asset_service.AnalyzeMoveResponse + ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._AnalyzeMove(self._session, self._host, self._interceptor) # type: ignore + return self._AnalyzeMove(self._session, self._host, self._interceptor) # type: ignore @property - def analyze_org_policies(self) -> Callable[ - [asset_service.AnalyzeOrgPoliciesRequest], - asset_service.AnalyzeOrgPoliciesResponse]: + def analyze_org_policies( + self, + ) -> Callable[ + [asset_service.AnalyzeOrgPoliciesRequest], + asset_service.AnalyzeOrgPoliciesResponse, + ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
# In C++ this would require a dynamic_cast - return self._AnalyzeOrgPolicies(self._session, self._host, self._interceptor) # type: ignore + return self._AnalyzeOrgPolicies(self._session, self._host, self._interceptor) # type: ignore @property - def analyze_org_policy_governed_assets(self) -> Callable[ - [asset_service.AnalyzeOrgPolicyGovernedAssetsRequest], - asset_service.AnalyzeOrgPolicyGovernedAssetsResponse]: + def analyze_org_policy_governed_assets( + self, + ) -> Callable[ + [asset_service.AnalyzeOrgPolicyGovernedAssetsRequest], + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse, + ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._AnalyzeOrgPolicyGovernedAssets(self._session, self._host, self._interceptor) # type: ignore + return self._AnalyzeOrgPolicyGovernedAssets(self._session, self._host, self._interceptor) # type: ignore @property - def analyze_org_policy_governed_containers(self) -> Callable[ - [asset_service.AnalyzeOrgPolicyGovernedContainersRequest], - asset_service.AnalyzeOrgPolicyGovernedContainersResponse]: + def analyze_org_policy_governed_containers( + self, + ) -> Callable[ + [asset_service.AnalyzeOrgPolicyGovernedContainersRequest], + asset_service.AnalyzeOrgPolicyGovernedContainersResponse, + ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._AnalyzeOrgPolicyGovernedContainers(self._session, self._host, self._interceptor) # type: ignore + return self._AnalyzeOrgPolicyGovernedContainers(self._session, self._host, self._interceptor) # type: ignore @property - def batch_get_assets_history(self) -> Callable[ - [asset_service.BatchGetAssetsHistoryRequest], - asset_service.BatchGetAssetsHistoryResponse]: + def batch_get_assets_history( + self, + ) -> Callable[ + [asset_service.BatchGetAssetsHistoryRequest], + asset_service.BatchGetAssetsHistoryResponse, + ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._BatchGetAssetsHistory(self._session, self._host, self._interceptor) # type: ignore + return self._BatchGetAssetsHistory(self._session, self._host, self._interceptor) # type: ignore @property - def batch_get_effective_iam_policies(self) -> Callable[ - [asset_service.BatchGetEffectiveIamPoliciesRequest], - asset_service.BatchGetEffectiveIamPoliciesResponse]: + def batch_get_effective_iam_policies( + self, + ) -> Callable[ + [asset_service.BatchGetEffectiveIamPoliciesRequest], + asset_service.BatchGetEffectiveIamPoliciesResponse, + ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._BatchGetEffectiveIamPolicies(self._session, self._host, self._interceptor) # type: ignore + return self._BatchGetEffectiveIamPolicies(self._session, self._host, self._interceptor) # type: ignore @property - def create_feed(self) -> Callable[ - [asset_service.CreateFeedRequest], - asset_service.Feed]: + def create_feed( + self, + ) -> Callable[[asset_service.CreateFeedRequest], asset_service.Feed]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
# In C++ this would require a dynamic_cast - return self._CreateFeed(self._session, self._host, self._interceptor) # type: ignore + return self._CreateFeed(self._session, self._host, self._interceptor) # type: ignore @property - def create_saved_query(self) -> Callable[ - [asset_service.CreateSavedQueryRequest], - asset_service.SavedQuery]: + def create_saved_query( + self, + ) -> Callable[[asset_service.CreateSavedQueryRequest], asset_service.SavedQuery]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._CreateSavedQuery(self._session, self._host, self._interceptor) # type: ignore + return self._CreateSavedQuery(self._session, self._host, self._interceptor) # type: ignore @property - def delete_feed(self) -> Callable[ - [asset_service.DeleteFeedRequest], - empty_pb2.Empty]: + def delete_feed( + self, + ) -> Callable[[asset_service.DeleteFeedRequest], empty_pb2.Empty]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._DeleteFeed(self._session, self._host, self._interceptor) # type: ignore + return self._DeleteFeed(self._session, self._host, self._interceptor) # type: ignore @property - def delete_saved_query(self) -> Callable[ - [asset_service.DeleteSavedQueryRequest], - empty_pb2.Empty]: + def delete_saved_query( + self, + ) -> Callable[[asset_service.DeleteSavedQueryRequest], empty_pb2.Empty]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._DeleteSavedQuery(self._session, self._host, self._interceptor) # type: ignore + return self._DeleteSavedQuery(self._session, self._host, self._interceptor) # type: ignore @property - def export_assets(self) -> Callable[ - [asset_service.ExportAssetsRequest], - operations_pb2.Operation]: + def export_assets( + self, + ) -> Callable[[asset_service.ExportAssetsRequest], operations_pb2.Operation]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._ExportAssets(self._session, self._host, self._interceptor) # type: ignore + return self._ExportAssets(self._session, self._host, self._interceptor) # type: ignore @property - def get_feed(self) -> Callable[ - [asset_service.GetFeedRequest], - asset_service.Feed]: + def get_feed(self) -> Callable[[asset_service.GetFeedRequest], asset_service.Feed]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._GetFeed(self._session, self._host, self._interceptor) # type: ignore + return self._GetFeed(self._session, self._host, self._interceptor) # type: ignore @property - def get_saved_query(self) -> Callable[ - [asset_service.GetSavedQueryRequest], - asset_service.SavedQuery]: + def get_saved_query( + self, + ) -> Callable[[asset_service.GetSavedQueryRequest], asset_service.SavedQuery]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
# In C++ this would require a dynamic_cast - return self._GetSavedQuery(self._session, self._host, self._interceptor) # type: ignore + return self._GetSavedQuery(self._session, self._host, self._interceptor) # type: ignore @property - def list_assets(self) -> Callable[ - [asset_service.ListAssetsRequest], - asset_service.ListAssetsResponse]: + def list_assets( + self, + ) -> Callable[[asset_service.ListAssetsRequest], asset_service.ListAssetsResponse]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._ListAssets(self._session, self._host, self._interceptor) # type: ignore + return self._ListAssets(self._session, self._host, self._interceptor) # type: ignore @property - def list_feeds(self) -> Callable[ - [asset_service.ListFeedsRequest], - asset_service.ListFeedsResponse]: + def list_feeds( + self, + ) -> Callable[[asset_service.ListFeedsRequest], asset_service.ListFeedsResponse]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._ListFeeds(self._session, self._host, self._interceptor) # type: ignore + return self._ListFeeds(self._session, self._host, self._interceptor) # type: ignore @property - def list_saved_queries(self) -> Callable[ - [asset_service.ListSavedQueriesRequest], - asset_service.ListSavedQueriesResponse]: + def list_saved_queries( + self, + ) -> Callable[ + [asset_service.ListSavedQueriesRequest], asset_service.ListSavedQueriesResponse + ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._ListSavedQueries(self._session, self._host, self._interceptor) # type: ignore + return self._ListSavedQueries(self._session, self._host, self._interceptor) # type: ignore @property - def query_assets(self) -> Callable[ - [asset_service.QueryAssetsRequest], - asset_service.QueryAssetsResponse]: + def query_assets( + self, + ) -> Callable[ + [asset_service.QueryAssetsRequest], asset_service.QueryAssetsResponse + ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._QueryAssets(self._session, self._host, self._interceptor) # type: ignore + return self._QueryAssets(self._session, self._host, self._interceptor) # type: ignore @property - def search_all_iam_policies(self) -> Callable[ - [asset_service.SearchAllIamPoliciesRequest], - asset_service.SearchAllIamPoliciesResponse]: + def search_all_iam_policies( + self, + ) -> Callable[ + [asset_service.SearchAllIamPoliciesRequest], + asset_service.SearchAllIamPoliciesResponse, + ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._SearchAllIamPolicies(self._session, self._host, self._interceptor) # type: ignore + return self._SearchAllIamPolicies(self._session, self._host, self._interceptor) # type: ignore @property - def search_all_resources(self) -> Callable[ - [asset_service.SearchAllResourcesRequest], - asset_service.SearchAllResourcesResponse]: + def search_all_resources( + self, + ) -> Callable[ + [asset_service.SearchAllResourcesRequest], + asset_service.SearchAllResourcesResponse, + ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
# In C++ this would require a dynamic_cast - return self._SearchAllResources(self._session, self._host, self._interceptor) # type: ignore + return self._SearchAllResources(self._session, self._host, self._interceptor) # type: ignore @property - def update_feed(self) -> Callable[ - [asset_service.UpdateFeedRequest], - asset_service.Feed]: + def update_feed( + self, + ) -> Callable[[asset_service.UpdateFeedRequest], asset_service.Feed]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._UpdateFeed(self._session, self._host, self._interceptor) # type: ignore + return self._UpdateFeed(self._session, self._host, self._interceptor) # type: ignore @property - def update_saved_query(self) -> Callable[ - [asset_service.UpdateSavedQueryRequest], - asset_service.SavedQuery]: + def update_saved_query( + self, + ) -> Callable[[asset_service.UpdateSavedQueryRequest], asset_service.SavedQuery]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._UpdateSavedQuery(self._session, self._host, self._interceptor) # type: ignore + return self._UpdateSavedQuery(self._session, self._host, self._interceptor) # type: ignore @property def get_operation(self): - return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore - class _GetOperation(_BaseAssetServiceRestTransport._BaseGetOperation, AssetServiceRestStub): + class _GetOperation( + _BaseAssetServiceRestTransport._BaseGetOperation, AssetServiceRestStub + ): def __hash__(self): return hash("AssetServiceRestTransport.GetOperation") @@ -4121,27 +5159,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: operations_pb2.GetOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: r"""Call the get operation method over HTTP. Args: @@ -4159,30 +5198,42 @@ def __call__(self, operations_pb2.Operation: Response from GetOperation method. 
""" - http_options = _BaseAssetServiceRestTransport._BaseGetOperation._get_http_options() + http_options = ( + _BaseAssetServiceRestTransport._BaseGetOperation._get_http_options() + ) request, metadata = self._interceptor.pre_get_operation(request, metadata) - transcoded_request = _BaseAssetServiceRestTransport._BaseGetOperation._get_transcoded_request(http_options, request) + transcoded_request = _BaseAssetServiceRestTransport._BaseGetOperation._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = _BaseAssetServiceRestTransport._BaseGetOperation._get_query_params_json(transcoded_request) + query_params = ( + _BaseAssetServiceRestTransport._BaseGetOperation._get_query_params_json( + transcoded_request + ) + ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.asset_v1.AssetServiceClient.GetOperation", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "GetOperation", "httpRequest": http_request, @@ -4191,7 +5242,14 @@ def __call__(self, ) # Send the request - response = AssetServiceRestTransport._GetOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = AssetServiceRestTransport._GetOperation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -4202,19 +5260,21 @@ def __call__(self, resp = operations_pb2.Operation() resp = json_format.Parse(content, resp) resp = self._interceptor.post_get_operation(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: response_payload = json_format.MessageToJson(resp) except: response_payload = None http_response = { "payload": response_payload, - "headers": dict(response.headers), + "headers": dict(response.headers), "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.asset_v1.AssetServiceAsyncClient.GetOperation", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "GetOperation", "httpResponse": http_response, @@ -4231,6 +5291,4 @@ def close(self): self._session.close() -__all__=( - 'AssetServiceRestTransport', -) +__all__ = ("AssetServiceRestTransport",) diff --git a/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest_base.py b/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest_base.py index 91d6091fe1..f8f831e0a9 100755 --- a/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest_base.py +++ b/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest_base.py @@ -14,19 +14,17 @@ # limitations under the License. # import json # type: ignore -from google.api_core import path_template -from google.api_core import gapic_v1 - -from google.protobuf import json_format -from .base import AssetServiceTransport, DEFAULT_CLIENT_INFO - import re from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +from google.api_core import gapic_v1, path_template +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import json_format from google.cloud.asset_v1.types import asset_service -from google.protobuf import empty_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore + +from .base import DEFAULT_CLIENT_INFO, AssetServiceTransport class _BaseAssetServiceRestTransport(AssetServiceTransport): @@ -42,14 +40,16 @@ class _BaseAssetServiceRestTransport(AssetServiceTransport): It sends JSON representations of protocol buffers over HTTP/1.1 """ - def __init__(self, *, - host: str = 'cloudasset.googleapis.com', - credentials: Optional[Any] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - api_audience: Optional[str] = None, - ) -> None: + def __init__( + self, + *, + host: str = "cloudasset.googleapis.com", + credentials: Optional[Any] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + api_audience: Optional[str] = None, + ) -> None: """Instantiate the transport. 
Args: host (Optional[str]): @@ -73,7 +73,9 @@ def __init__(self, *, # Run the base constructor maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) if maybe_url_match is None: - raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER url_match_items = maybe_url_match.groupdict() @@ -84,26 +86,32 @@ def __init__(self, *, credentials=credentials, client_info=client_info, always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience + api_audience=api_audience, ) class _BaseAnalyzeIamPolicy: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "analysisQuery" : {}, } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "analysisQuery": {}, + } @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{analysis_query.scope=*/*}:analyzeIamPolicy', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{analysis_query.scope=*/*}:analyzeIamPolicy", + }, ] return http_options @@ -115,11 +123,17 @@ def _get_transcoded_request(http_options, request): @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(_BaseAssetServiceRestTransport._BaseAnalyzeIamPolicy._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseAssetServiceRestTransport._BaseAnalyzeIamPolicy._get_unset_required_fields( + query_params + ) + ) return query_params @@ -127,20 +141,24 @@ class _BaseAnalyzeIamPolicyLongrunning: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{analysis_query.scope=*/*}:analyzeIamPolicyLongrunning', - 'body': '*', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{analysis_query.scope=*/*}:analyzeIamPolicyLongrunning", + "body": "*", + }, ] return http_options @@ -155,17 +173,23 @@ def _get_request_body_json(transcoded_request): # Jsonify the request body body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=False + transcoded_request["body"], use_integers_for_enums=False ) return body + @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) -
query_params.update(_BaseAssetServiceRestTransport._BaseAnalyzeIamPolicyLongrunning._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseAssetServiceRestTransport._BaseAnalyzeIamPolicyLongrunning._get_unset_required_fields( + query_params + ) + ) return query_params @@ -173,19 +197,25 @@ class _BaseAnalyzeMove: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "destinationParent" : "", } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "destinationParent": "", + } @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{resource=*/*}:analyzeMove', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{resource=*/*}:analyzeMove", + }, ] return http_options @@ -197,11 +227,17 @@ def _get_transcoded_request(http_options, request): @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(_BaseAssetServiceRestTransport._BaseAnalyzeMove._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseAssetServiceRestTransport._BaseAnalyzeMove._get_unset_required_fields( + query_params + ) + ) return query_params @@ -209,19 +245,25 @@ class _BaseAnalyzeOrgPolicies: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "constraint" : "", } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "constraint": "", + } @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{scope=*/*}:analyzeOrgPolicies', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{scope=*/*}:analyzeOrgPolicies", + }, ] return http_options @@ -233,11 +275,17 @@ def _get_transcoded_request(http_options, request): @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(_BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicies._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicies._get_unset_required_fields( + query_params + ) + ) return query_params @@ -245,19 +293,25 @@ class _BaseAnalyzeOrgPolicyGovernedAssets: 
def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "constraint" : "", } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "constraint": "", + } @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{scope=*/*}:analyzeOrgPolicyGovernedAssets', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{scope=*/*}:analyzeOrgPolicyGovernedAssets", + }, ] return http_options @@ -269,11 +323,17 @@ def _get_transcoded_request(http_options, request): @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(_BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicyGovernedAssets._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicyGovernedAssets._get_unset_required_fields( + query_params + ) + ) return query_params @@ -281,35 +341,49 @@ class _BaseAnalyzeOrgPolicyGovernedContainers: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "constraint" : "", } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "constraint": "", + } @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{scope=*/*}:analyzeOrgPolicyGovernedContainers', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{scope=*/*}:analyzeOrgPolicyGovernedContainers", + }, ] return http_options @staticmethod def _get_transcoded_request(http_options, request): - pb_request = asset_service.AnalyzeOrgPolicyGovernedContainersRequest.pb(request) + pb_request = asset_service.AnalyzeOrgPolicyGovernedContainersRequest.pb( + request + ) transcoded_request = path_template.transcode(http_options, pb_request) return transcoded_request @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(_BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicyGovernedContainers._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicyGovernedContainers._get_unset_required_fields( + query_params + ) + ) return query_params @@ -317,19 +391,23 @@ class _BaseBatchGetAssetsHistory: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must 
be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=*/*}:batchGetAssetsHistory', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=*/*}:batchGetAssetsHistory", + }, ] return http_options @@ -341,11 +419,17 @@ def _get_transcoded_request(http_options, request): @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(_BaseAssetServiceRestTransport._BaseBatchGetAssetsHistory._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseAssetServiceRestTransport._BaseBatchGetAssetsHistory._get_unset_required_fields( + query_params + ) + ) return query_params @@ -353,19 +437,25 @@ class _BaseBatchGetEffectiveIamPolicies: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "names" : "", } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "names": "", + } @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{scope=*/*}/effectiveIamPolicies:batchGet', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{scope=*/*}/effectiveIamPolicies:batchGet", + }, ] return http_options @@ -377,11 +467,17 @@ def _get_transcoded_request(http_options, request): @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(_BaseAssetServiceRestTransport._BaseBatchGetEffectiveIamPolicies._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseAssetServiceRestTransport._BaseBatchGetEffectiveIamPolicies._get_unset_required_fields( + query_params + ) + ) return query_params @@ -389,20 +485,24 @@ class _BaseCreateFeed: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: 
List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=*/*}/feeds', - 'body': '*', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=*/*}/feeds", + "body": "*", + }, ] return http_options @@ -417,17 +517,23 @@ def _get_request_body_json(transcoded_request): # Jsonify the request body body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=False + transcoded_request["body"], use_integers_for_enums=False ) return body + @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(_BaseAssetServiceRestTransport._BaseCreateFeed._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseAssetServiceRestTransport._BaseCreateFeed._get_unset_required_fields( + query_params + ) + ) return query_params @@ -435,20 +541,26 @@ class _BaseCreateSavedQuery: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "savedQueryId" : "", } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "savedQueryId": "", + } @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=*/*}/savedQueries', - 'body': 'saved_query', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=*/*}/savedQueries", + "body": "saved_query", + }, ] return http_options @@ -463,17 +575,23 @@ def _get_request_body_json(transcoded_request): # Jsonify the request body body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=False + transcoded_request["body"], use_integers_for_enums=False ) return body + @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(_BaseAssetServiceRestTransport._BaseCreateSavedQuery._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseAssetServiceRestTransport._BaseCreateSavedQuery._get_unset_required_fields( + query_params + ) + ) return query_params @@ -481,19 +599,23 @@ class _BaseDeleteFeed: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=*/*/feeds/*}', - }, + http_options: 
List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=*/*/feeds/*}", + }, ] return http_options @@ -505,11 +627,17 @@ def _get_transcoded_request(http_options, request): @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(_BaseAssetServiceRestTransport._BaseDeleteFeed._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseAssetServiceRestTransport._BaseDeleteFeed._get_unset_required_fields( + query_params + ) + ) return query_params @@ -517,19 +645,23 @@ class _BaseDeleteSavedQuery: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=*/*/savedQueries/*}', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=*/*/savedQueries/*}", + }, ] return http_options @@ -541,11 +673,17 @@ def _get_transcoded_request(http_options, request): @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(_BaseAssetServiceRestTransport._BaseDeleteSavedQuery._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseAssetServiceRestTransport._BaseDeleteSavedQuery._get_unset_required_fields( + query_params + ) + ) return query_params @@ -553,20 +691,24 @@ class _BaseExportAssets: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=*/*}:exportAssets', - 'body': '*', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=*/*}:exportAssets", + "body": "*", + }, ] return http_options @@ -581,17 +723,23 @@ def _get_request_body_json(transcoded_request): # Jsonify the request body body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=False + transcoded_request["body"], use_integers_for_enums=False ) return body + @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, 
- )) - query_params.update(_BaseAssetServiceRestTransport._BaseExportAssets._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseAssetServiceRestTransport._BaseExportAssets._get_unset_required_fields( + query_params + ) + ) return query_params @@ -599,19 +747,23 @@ class _BaseGetFeed: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=*/*/feeds/*}', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=*/*/feeds/*}", + }, ] return http_options @@ -623,11 +775,17 @@ def _get_transcoded_request(http_options, request): @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(_BaseAssetServiceRestTransport._BaseGetFeed._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseAssetServiceRestTransport._BaseGetFeed._get_unset_required_fields( + query_params + ) + ) return query_params @@ -635,19 +793,23 @@ class _BaseGetSavedQuery: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=*/*/savedQueries/*}', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=*/*/savedQueries/*}", + }, ] return http_options @@ -659,11 +821,17 @@ def _get_transcoded_request(http_options, request): @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(_BaseAssetServiceRestTransport._BaseGetSavedQuery._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseAssetServiceRestTransport._BaseGetSavedQuery._get_unset_required_fields( + query_params + ) + ) return query_params @@ -671,19 +839,23 @@ class _BaseListAssets: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: 
Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=*/*}/assets', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=*/*}/assets", + }, ] return http_options @@ -695,11 +867,17 @@ def _get_transcoded_request(http_options, request): @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(_BaseAssetServiceRestTransport._BaseListAssets._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseAssetServiceRestTransport._BaseListAssets._get_unset_required_fields( + query_params + ) + ) return query_params @@ -707,19 +885,23 @@ class _BaseListFeeds: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=*/*}/feeds', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=*/*}/feeds", + }, ] return http_options @@ -731,11 +913,17 @@ def _get_transcoded_request(http_options, request): @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(_BaseAssetServiceRestTransport._BaseListFeeds._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseAssetServiceRestTransport._BaseListFeeds._get_unset_required_fields( + query_params + ) + ) return query_params @@ -743,19 +931,23 @@ class _BaseListSavedQueries: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=*/*}/savedQueries', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=*/*}/savedQueries", + }, ] return http_options @@ -767,11 +959,17 @@ def _get_transcoded_request(http_options, request): 
@staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(_BaseAssetServiceRestTransport._BaseListSavedQueries._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseAssetServiceRestTransport._BaseListSavedQueries._get_unset_required_fields( + query_params + ) + ) return query_params @@ -779,20 +977,24 @@ class _BaseQueryAssets: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=*/*}:queryAssets', - 'body': '*', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=*/*}:queryAssets", + "body": "*", + }, ] return http_options @@ -807,17 +1009,23 @@ def _get_request_body_json(transcoded_request): # Jsonify the request body body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=False + transcoded_request["body"], use_integers_for_enums=False ) return body + @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(_BaseAssetServiceRestTransport._BaseQueryAssets._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseAssetServiceRestTransport._BaseQueryAssets._get_unset_required_fields( + query_params + ) + ) return query_params @@ -825,19 +1033,23 @@ class _BaseSearchAllIamPolicies: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{scope=*/*}:searchAllIamPolicies', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{scope=*/*}:searchAllIamPolicies", + }, ] return http_options @@ -849,11 +1061,17 @@ def _get_transcoded_request(http_options, request): @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(_BaseAssetServiceRestTransport._BaseSearchAllIamPolicies._get_unset_required_fields(query_params)) + query_params = json.loads( + 
json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseAssetServiceRestTransport._BaseSearchAllIamPolicies._get_unset_required_fields( + query_params + ) + ) return query_params @@ -861,19 +1079,23 @@ class _BaseSearchAllResources: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{scope=*/*}:searchAllResources', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{scope=*/*}:searchAllResources", + }, ] return http_options @@ -885,11 +1107,17 @@ def _get_transcoded_request(http_options, request): @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(_BaseAssetServiceRestTransport._BaseSearchAllResources._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseAssetServiceRestTransport._BaseSearchAllResources._get_unset_required_fields( + query_params + ) + ) return query_params @@ -897,20 +1125,24 @@ class _BaseUpdateFeed: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': '/v1/{feed.name=*/*/feeds/*}', - 'body': '*', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/{feed.name=*/*/feeds/*}", + "body": "*", + }, ] return http_options @@ -925,17 +1157,23 @@ def _get_request_body_json(transcoded_request): # Jsonify the request body body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=False + transcoded_request["body"], use_integers_for_enums=False ) return body + @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(_BaseAssetServiceRestTransport._BaseUpdateFeed._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseAssetServiceRestTransport._BaseUpdateFeed._get_unset_required_fields( + query_params + ) + ) return query_params @@ -943,20 +1181,26 @@ class _BaseUpdateSavedQuery: def __hash__(self): # pragma: NO COVER return 
NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "updateMask" : {}, } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask": {}, + } @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': '/v1/{saved_query.name=*/*/savedQueries/*}', - 'body': 'saved_query', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/{saved_query.name=*/*/savedQueries/*}", + "body": "saved_query", + }, ] return http_options @@ -971,17 +1215,23 @@ def _get_request_body_json(transcoded_request): # Jsonify the request body body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=False + transcoded_request["body"], use_integers_for_enums=False ) return body + @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(_BaseAssetServiceRestTransport._BaseUpdateSavedQuery._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseAssetServiceRestTransport._BaseUpdateSavedQuery._get_unset_required_fields( + query_params + ) + ) return query_params @@ -991,26 +1241,24 @@ def __hash__(self): # pragma: NO COVER @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=*/*/operations/*/**}', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=*/*/operations/*/**}", + }, ] return http_options @staticmethod def _get_transcoded_request(http_options, request): request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) + transcoded_request = path_template.transcode(http_options, **request_kwargs) return transcoded_request @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) + query_params = json.loads(json.dumps(transcoded_request["query_params"])) return query_params -__all__=( - '_BaseAssetServiceRestTransport', -) +__all__ = ("_BaseAssetServiceRestTransport",) diff --git a/tests/integration/goldens/asset/google/cloud/asset_v1/types/__init__.py b/tests/integration/goldens/asset/google/cloud/asset_v1/types/__init__.py index d8a9b7f910..7d558fc697 100755 --- a/tests/integration/goldens/asset/google/cloud/asset_v1/types/__init__.py +++ b/tests/integration/goldens/asset/google/cloud/asset_v1/types/__init__.py @@ -34,6 +34,7 @@ BatchGetEffectiveIamPoliciesRequest, BatchGetEffectiveIamPoliciesResponse, BigQueryDestination, + ContentType, CreateFeedRequest, CreateSavedQueryRequest, DeleteFeedRequest, @@ -74,7 +75,6 @@ TableSchema, UpdateFeedRequest, UpdateSavedQueryRequest, - ContentType, ) from .assets import ( Asset, @@ -96,81 +96,81 @@ ) __all__ = ( - 'AnalyzeIamPolicyLongrunningMetadata', - 'AnalyzeIamPolicyLongrunningRequest', - 'AnalyzeIamPolicyLongrunningResponse', - 'AnalyzeIamPolicyRequest', - 
'AnalyzeIamPolicyResponse', - 'AnalyzeMoveRequest', - 'AnalyzeMoveResponse', - 'AnalyzeOrgPoliciesRequest', - 'AnalyzeOrgPoliciesResponse', - 'AnalyzeOrgPolicyGovernedAssetsRequest', - 'AnalyzeOrgPolicyGovernedAssetsResponse', - 'AnalyzeOrgPolicyGovernedContainersRequest', - 'AnalyzeOrgPolicyGovernedContainersResponse', - 'AnalyzerOrgPolicy', - 'AnalyzerOrgPolicyConstraint', - 'BatchGetAssetsHistoryRequest', - 'BatchGetAssetsHistoryResponse', - 'BatchGetEffectiveIamPoliciesRequest', - 'BatchGetEffectiveIamPoliciesResponse', - 'BigQueryDestination', - 'CreateFeedRequest', - 'CreateSavedQueryRequest', - 'DeleteFeedRequest', - 'DeleteSavedQueryRequest', - 'ExportAssetsRequest', - 'ExportAssetsResponse', - 'Feed', - 'FeedOutputConfig', - 'GcsDestination', - 'GcsOutputResult', - 'GetFeedRequest', - 'GetSavedQueryRequest', - 'IamPolicyAnalysisOutputConfig', - 'IamPolicyAnalysisQuery', - 'ListAssetsRequest', - 'ListAssetsResponse', - 'ListFeedsRequest', - 'ListFeedsResponse', - 'ListSavedQueriesRequest', - 'ListSavedQueriesResponse', - 'MoveAnalysis', - 'MoveAnalysisResult', - 'MoveImpact', - 'OutputConfig', - 'OutputResult', - 'PartitionSpec', - 'PubsubDestination', - 'QueryAssetsOutputConfig', - 'QueryAssetsRequest', - 'QueryAssetsResponse', - 'QueryResult', - 'SavedQuery', - 'SearchAllIamPoliciesRequest', - 'SearchAllIamPoliciesResponse', - 'SearchAllResourcesRequest', - 'SearchAllResourcesResponse', - 'TableFieldSchema', - 'TableSchema', - 'UpdateFeedRequest', - 'UpdateSavedQueryRequest', - 'ContentType', - 'Asset', - 'AttachedResource', - 'ConditionEvaluation', - 'IamPolicyAnalysisResult', - 'IamPolicyAnalysisState', - 'IamPolicySearchResult', - 'RelatedAsset', - 'RelatedAssets', - 'RelatedResource', - 'RelatedResources', - 'RelationshipAttributes', - 'Resource', - 'ResourceSearchResult', - 'TemporalAsset', - 'TimeWindow', - 'VersionedResource', + "AnalyzeIamPolicyLongrunningMetadata", + "AnalyzeIamPolicyLongrunningRequest", + "AnalyzeIamPolicyLongrunningResponse", + "AnalyzeIamPolicyRequest", + "AnalyzeIamPolicyResponse", + "AnalyzeMoveRequest", + "AnalyzeMoveResponse", + "AnalyzeOrgPoliciesRequest", + "AnalyzeOrgPoliciesResponse", + "AnalyzeOrgPolicyGovernedAssetsRequest", + "AnalyzeOrgPolicyGovernedAssetsResponse", + "AnalyzeOrgPolicyGovernedContainersRequest", + "AnalyzeOrgPolicyGovernedContainersResponse", + "AnalyzerOrgPolicy", + "AnalyzerOrgPolicyConstraint", + "BatchGetAssetsHistoryRequest", + "BatchGetAssetsHistoryResponse", + "BatchGetEffectiveIamPoliciesRequest", + "BatchGetEffectiveIamPoliciesResponse", + "BigQueryDestination", + "CreateFeedRequest", + "CreateSavedQueryRequest", + "DeleteFeedRequest", + "DeleteSavedQueryRequest", + "ExportAssetsRequest", + "ExportAssetsResponse", + "Feed", + "FeedOutputConfig", + "GcsDestination", + "GcsOutputResult", + "GetFeedRequest", + "GetSavedQueryRequest", + "IamPolicyAnalysisOutputConfig", + "IamPolicyAnalysisQuery", + "ListAssetsRequest", + "ListAssetsResponse", + "ListFeedsRequest", + "ListFeedsResponse", + "ListSavedQueriesRequest", + "ListSavedQueriesResponse", + "MoveAnalysis", + "MoveAnalysisResult", + "MoveImpact", + "OutputConfig", + "OutputResult", + "PartitionSpec", + "PubsubDestination", + "QueryAssetsOutputConfig", + "QueryAssetsRequest", + "QueryAssetsResponse", + "QueryResult", + "SavedQuery", + "SearchAllIamPoliciesRequest", + "SearchAllIamPoliciesResponse", + "SearchAllResourcesRequest", + "SearchAllResourcesResponse", + "TableFieldSchema", + "TableSchema", + "UpdateFeedRequest", + "UpdateSavedQueryRequest", + 
"ContentType", + "Asset", + "AttachedResource", + "ConditionEvaluation", + "IamPolicyAnalysisResult", + "IamPolicyAnalysisState", + "IamPolicySearchResult", + "RelatedAsset", + "RelatedAssets", + "RelatedResource", + "RelatedResources", + "RelationshipAttributes", + "Resource", + "ResourceSearchResult", + "TemporalAsset", + "TimeWindow", + "VersionedResource", ) diff --git a/tests/integration/goldens/asset/google/cloud/asset_v1/types/asset_service.py b/tests/integration/goldens/asset/google/cloud/asset_v1/types/asset_service.py index 3caaefee47..caa108900b 100755 --- a/tests/integration/goldens/asset/google/cloud/asset_v1/types/asset_service.py +++ b/tests/integration/goldens/asset/google/cloud/asset_v1/types/asset_service.py @@ -17,9 +17,6 @@ from typing import MutableMapping, MutableSequence -import proto # type: ignore - -from google.cloud.asset_v1.types import assets as gca_assets from google.iam.v1 import policy_pb2 # type: ignore from google.protobuf import duration_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore @@ -27,72 +24,74 @@ from google.protobuf import timestamp_pb2 # type: ignore from google.rpc import status_pb2 # type: ignore from google.type import expr_pb2 # type: ignore +import proto # type: ignore +from google.cloud.asset_v1.types import assets as gca_assets __protobuf__ = proto.module( - package='google.cloud.asset.v1', + package="google.cloud.asset.v1", manifest={ - 'ContentType', - 'AnalyzeIamPolicyLongrunningMetadata', - 'ExportAssetsRequest', - 'ExportAssetsResponse', - 'ListAssetsRequest', - 'ListAssetsResponse', - 'BatchGetAssetsHistoryRequest', - 'BatchGetAssetsHistoryResponse', - 'CreateFeedRequest', - 'GetFeedRequest', - 'ListFeedsRequest', - 'ListFeedsResponse', - 'UpdateFeedRequest', - 'DeleteFeedRequest', - 'OutputConfig', - 'OutputResult', - 'GcsOutputResult', - 'GcsDestination', - 'BigQueryDestination', - 'PartitionSpec', - 'PubsubDestination', - 'FeedOutputConfig', - 'Feed', - 'SearchAllResourcesRequest', - 'SearchAllResourcesResponse', - 'SearchAllIamPoliciesRequest', - 'SearchAllIamPoliciesResponse', - 'IamPolicyAnalysisQuery', - 'AnalyzeIamPolicyRequest', - 'AnalyzeIamPolicyResponse', - 'IamPolicyAnalysisOutputConfig', - 'AnalyzeIamPolicyLongrunningRequest', - 'AnalyzeIamPolicyLongrunningResponse', - 'SavedQuery', - 'CreateSavedQueryRequest', - 'GetSavedQueryRequest', - 'ListSavedQueriesRequest', - 'ListSavedQueriesResponse', - 'UpdateSavedQueryRequest', - 'DeleteSavedQueryRequest', - 'AnalyzeMoveRequest', - 'AnalyzeMoveResponse', - 'MoveAnalysis', - 'MoveAnalysisResult', - 'MoveImpact', - 'QueryAssetsOutputConfig', - 'QueryAssetsRequest', - 'QueryAssetsResponse', - 'QueryResult', - 'TableSchema', - 'TableFieldSchema', - 'BatchGetEffectiveIamPoliciesRequest', - 'BatchGetEffectiveIamPoliciesResponse', - 'AnalyzerOrgPolicy', - 'AnalyzerOrgPolicyConstraint', - 'AnalyzeOrgPoliciesRequest', - 'AnalyzeOrgPoliciesResponse', - 'AnalyzeOrgPolicyGovernedContainersRequest', - 'AnalyzeOrgPolicyGovernedContainersResponse', - 'AnalyzeOrgPolicyGovernedAssetsRequest', - 'AnalyzeOrgPolicyGovernedAssetsResponse', + "ContentType", + "AnalyzeIamPolicyLongrunningMetadata", + "ExportAssetsRequest", + "ExportAssetsResponse", + "ListAssetsRequest", + "ListAssetsResponse", + "BatchGetAssetsHistoryRequest", + "BatchGetAssetsHistoryResponse", + "CreateFeedRequest", + "GetFeedRequest", + "ListFeedsRequest", + "ListFeedsResponse", + "UpdateFeedRequest", + "DeleteFeedRequest", + "OutputConfig", + "OutputResult", + "GcsOutputResult", + 
"GcsDestination", + "BigQueryDestination", + "PartitionSpec", + "PubsubDestination", + "FeedOutputConfig", + "Feed", + "SearchAllResourcesRequest", + "SearchAllResourcesResponse", + "SearchAllIamPoliciesRequest", + "SearchAllIamPoliciesResponse", + "IamPolicyAnalysisQuery", + "AnalyzeIamPolicyRequest", + "AnalyzeIamPolicyResponse", + "IamPolicyAnalysisOutputConfig", + "AnalyzeIamPolicyLongrunningRequest", + "AnalyzeIamPolicyLongrunningResponse", + "SavedQuery", + "CreateSavedQueryRequest", + "GetSavedQueryRequest", + "ListSavedQueriesRequest", + "ListSavedQueriesResponse", + "UpdateSavedQueryRequest", + "DeleteSavedQueryRequest", + "AnalyzeMoveRequest", + "AnalyzeMoveResponse", + "MoveAnalysis", + "MoveAnalysisResult", + "MoveImpact", + "QueryAssetsOutputConfig", + "QueryAssetsRequest", + "QueryAssetsResponse", + "QueryResult", + "TableSchema", + "TableFieldSchema", + "BatchGetEffectiveIamPoliciesRequest", + "BatchGetEffectiveIamPoliciesResponse", + "AnalyzerOrgPolicy", + "AnalyzerOrgPolicyConstraint", + "AnalyzeOrgPoliciesRequest", + "AnalyzeOrgPoliciesResponse", + "AnalyzeOrgPolicyGovernedContainersRequest", + "AnalyzeOrgPolicyGovernedContainersResponse", + "AnalyzeOrgPolicyGovernedAssetsRequest", + "AnalyzeOrgPolicyGovernedAssetsResponse", }, ) @@ -224,15 +223,15 @@ class ExportAssetsRequest(proto.Message): proto.STRING, number=3, ) - content_type: 'ContentType' = proto.Field( + content_type: "ContentType" = proto.Field( proto.ENUM, number=4, - enum='ContentType', + enum="ContentType", ) - output_config: 'OutputConfig' = proto.Field( + output_config: "OutputConfig" = proto.Field( proto.MESSAGE, number=5, - message='OutputConfig', + message="OutputConfig", ) relationship_types: MutableSequence[str] = proto.RepeatedField( proto.STRING, @@ -267,15 +266,15 @@ class ExportAssetsResponse(proto.Message): number=1, message=timestamp_pb2.Timestamp, ) - output_config: 'OutputConfig' = proto.Field( + output_config: "OutputConfig" = proto.Field( proto.MESSAGE, number=2, - message='OutputConfig', + message="OutputConfig", ) - output_result: 'OutputResult' = proto.Field( + output_result: "OutputResult" = proto.Field( proto.MESSAGE, number=3, - message='OutputResult', + message="OutputResult", ) @@ -368,10 +367,10 @@ class ListAssetsRequest(proto.Message): proto.STRING, number=3, ) - content_type: 'ContentType' = proto.Field( + content_type: "ContentType" = proto.Field( proto.ENUM, number=4, - enum='ContentType', + enum="ContentType", ) page_size: int = proto.Field( proto.INT32, @@ -479,10 +478,10 @@ class BatchGetAssetsHistoryRequest(proto.Message): proto.STRING, number=2, ) - content_type: 'ContentType' = proto.Field( + content_type: "ContentType" = proto.Field( proto.ENUM, number=3, - enum='ContentType', + enum="ContentType", ) read_time_window: gca_assets.TimeWindow = proto.Field( proto.MESSAGE, @@ -543,10 +542,10 @@ class CreateFeedRequest(proto.Message): proto.STRING, number=2, ) - feed: 'Feed' = proto.Field( + feed: "Feed" = proto.Field( proto.MESSAGE, number=3, - message='Feed', + message="Feed", ) @@ -594,10 +593,10 @@ class ListFeedsResponse(proto.Message): A list of feeds. """ - feeds: MutableSequence['Feed'] = proto.RepeatedField( + feeds: MutableSequence["Feed"] = proto.RepeatedField( proto.MESSAGE, number=1, - message='Feed', + message="Feed", ) @@ -617,10 +616,10 @@ class UpdateFeedRequest(proto.Message): contain fields that are immutable or only set by the server. 
""" - feed: 'Feed' = proto.Field( + feed: "Feed" = proto.Field( proto.MESSAGE, number=1, - message='Feed', + message="Feed", ) update_mask: field_mask_pb2.FieldMask = proto.Field( proto.MESSAGE, @@ -669,17 +668,17 @@ class OutputConfig(proto.Message): This field is a member of `oneof`_ ``destination``. """ - gcs_destination: 'GcsDestination' = proto.Field( + gcs_destination: "GcsDestination" = proto.Field( proto.MESSAGE, number=1, - oneof='destination', - message='GcsDestination', + oneof="destination", + message="GcsDestination", ) - bigquery_destination: 'BigQueryDestination' = proto.Field( + bigquery_destination: "BigQueryDestination" = proto.Field( proto.MESSAGE, number=2, - oneof='destination', - message='BigQueryDestination', + oneof="destination", + message="BigQueryDestination", ) @@ -695,11 +694,11 @@ class OutputResult(proto.Message): This field is a member of `oneof`_ ``result``. """ - gcs_result: 'GcsOutputResult' = proto.Field( + gcs_result: "GcsOutputResult" = proto.Field( proto.MESSAGE, number=1, - oneof='result', - message='GcsOutputResult', + oneof="result", + message="GcsOutputResult", ) @@ -759,12 +758,12 @@ class GcsDestination(proto.Message): uri: str = proto.Field( proto.STRING, number=1, - oneof='object_uri', + oneof="object_uri", ) uri_prefix: str = proto.Field( proto.STRING, number=2, - oneof='object_uri', + oneof="object_uri", ) @@ -864,10 +863,10 @@ class BigQueryDestination(proto.Message): proto.BOOL, number=3, ) - partition_spec: 'PartitionSpec' = proto.Field( + partition_spec: "PartitionSpec" = proto.Field( proto.MESSAGE, number=4, - message='PartitionSpec', + message="PartitionSpec", ) separate_tables_per_asset_type: bool = proto.Field( proto.BOOL, @@ -884,6 +883,7 @@ class PartitionSpec(proto.Message): The partition key for BigQuery partitioned table. """ + class PartitionKey(proto.Enum): r"""This enum is used to determine the partition key column when exporting assets to BigQuery partitioned table(s). Note that, if the @@ -948,11 +948,11 @@ class FeedOutputConfig(proto.Message): This field is a member of `oneof`_ ``destination``. """ - pubsub_destination: 'PubsubDestination' = proto.Field( + pubsub_destination: "PubsubDestination" = proto.Field( proto.MESSAGE, number=1, - oneof='destination', - message='PubsubDestination', + oneof="destination", + message="PubsubDestination", ) @@ -1048,15 +1048,15 @@ class Feed(proto.Message): proto.STRING, number=3, ) - content_type: 'ContentType' = proto.Field( + content_type: "ContentType" = proto.Field( proto.ENUM, number=4, - enum='ContentType', + enum="ContentType", ) - feed_output_config: 'FeedOutputConfig' = proto.Field( + feed_output_config: "FeedOutputConfig" = proto.Field( proto.MESSAGE, number=5, - message='FeedOutputConfig', + message="FeedOutputConfig", ) condition: expr_pb2.Expr = proto.Field( proto.MESSAGE, @@ -1726,7 +1726,7 @@ class ConditionContext(proto.Message): access_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=1, - oneof='TimeContext', + oneof="TimeContext", message=timestamp_pb2.Timestamp, ) @@ -1802,10 +1802,10 @@ class AnalyzeIamPolicyRequest(proto.Message): Default is empty. """ - analysis_query: 'IamPolicyAnalysisQuery' = proto.Field( + analysis_query: "IamPolicyAnalysisQuery" = proto.Field( proto.MESSAGE, number=1, - message='IamPolicyAnalysisQuery', + message="IamPolicyAnalysisQuery", ) saved_analysis_query: str = proto.Field( proto.STRING, @@ -1858,12 +1858,14 @@ class IamPolicyAnalysis(proto.Message): the query handling. 
""" - analysis_query: 'IamPolicyAnalysisQuery' = proto.Field( + analysis_query: "IamPolicyAnalysisQuery" = proto.Field( proto.MESSAGE, number=1, - message='IamPolicyAnalysisQuery', + message="IamPolicyAnalysisQuery", ) - analysis_results: MutableSequence[gca_assets.IamPolicyAnalysisResult] = proto.RepeatedField( + analysis_results: MutableSequence[ + gca_assets.IamPolicyAnalysisResult + ] = proto.RepeatedField( proto.MESSAGE, number=2, message=gca_assets.IamPolicyAnalysisResult, @@ -1872,7 +1874,9 @@ class IamPolicyAnalysis(proto.Message): proto.BOOL, number=3, ) - non_critical_errors: MutableSequence[gca_assets.IamPolicyAnalysisState] = proto.RepeatedField( + non_critical_errors: MutableSequence[ + gca_assets.IamPolicyAnalysisState + ] = proto.RepeatedField( proto.MESSAGE, number=5, message=gca_assets.IamPolicyAnalysisState, @@ -1883,7 +1887,9 @@ class IamPolicyAnalysis(proto.Message): number=1, message=IamPolicyAnalysis, ) - service_account_impersonation_analysis: MutableSequence[IamPolicyAnalysis] = proto.RepeatedField( + service_account_impersonation_analysis: MutableSequence[ + IamPolicyAnalysis + ] = proto.RepeatedField( proto.MESSAGE, number=2, message=IamPolicyAnalysis, @@ -1982,6 +1988,7 @@ class BigQueryDestination(proto.Message): successfully. Details are at https://cloud.google.com/bigquery/docs/loading-data-local#appending_to_or_overwriting_a_table_using_a_local_file. """ + class PartitionKey(proto.Enum): r"""This enum determines the partition key column for the bigquery tables. Partitioning can improve query performance and @@ -2011,10 +2018,10 @@ class PartitionKey(proto.Enum): proto.STRING, number=2, ) - partition_key: 'IamPolicyAnalysisOutputConfig.BigQueryDestination.PartitionKey' = proto.Field( + partition_key: "IamPolicyAnalysisOutputConfig.BigQueryDestination.PartitionKey" = proto.Field( proto.ENUM, number=3, - enum='IamPolicyAnalysisOutputConfig.BigQueryDestination.PartitionKey', + enum="IamPolicyAnalysisOutputConfig.BigQueryDestination.PartitionKey", ) write_disposition: str = proto.Field( proto.STRING, @@ -2024,13 +2031,13 @@ class PartitionKey(proto.Enum): gcs_destination: GcsDestination = proto.Field( proto.MESSAGE, number=1, - oneof='destination', + oneof="destination", message=GcsDestination, ) bigquery_destination: BigQueryDestination = proto.Field( proto.MESSAGE, number=2, - oneof='destination', + oneof="destination", message=BigQueryDestination, ) @@ -2066,19 +2073,19 @@ class AnalyzeIamPolicyLongrunningRequest(proto.Message): where the results will be output to. """ - analysis_query: 'IamPolicyAnalysisQuery' = proto.Field( + analysis_query: "IamPolicyAnalysisQuery" = proto.Field( proto.MESSAGE, number=1, - message='IamPolicyAnalysisQuery', + message="IamPolicyAnalysisQuery", ) saved_analysis_query: str = proto.Field( proto.STRING, number=3, ) - output_config: 'IamPolicyAnalysisOutputConfig' = proto.Field( + output_config: "IamPolicyAnalysisOutputConfig" = proto.Field( proto.MESSAGE, number=2, - message='IamPolicyAnalysisOutputConfig', + message="IamPolicyAnalysisOutputConfig", ) @@ -2139,11 +2146,11 @@ class QueryContent(proto.Message): This field is a member of `oneof`_ ``query_content``. 
""" - iam_policy_analysis_query: 'IamPolicyAnalysisQuery' = proto.Field( + iam_policy_analysis_query: "IamPolicyAnalysisQuery" = proto.Field( proto.MESSAGE, number=1, - oneof='query_content', - message='IamPolicyAnalysisQuery', + oneof="query_content", + message="IamPolicyAnalysisQuery", ) name: str = proto.Field( @@ -2216,10 +2223,10 @@ class CreateSavedQueryRequest(proto.Message): proto.STRING, number=1, ) - saved_query: 'SavedQuery' = proto.Field( + saved_query: "SavedQuery" = proto.Field( proto.MESSAGE, number=2, - message='SavedQuery', + message="SavedQuery", ) saved_query_id: str = proto.Field( proto.STRING, @@ -2317,10 +2324,10 @@ class ListSavedQueriesResponse(proto.Message): def raw_page(self): return self - saved_queries: MutableSequence['SavedQuery'] = proto.RepeatedField( + saved_queries: MutableSequence["SavedQuery"] = proto.RepeatedField( proto.MESSAGE, number=1, - message='SavedQuery', + message="SavedQuery", ) next_page_token: str = proto.Field( proto.STRING, @@ -2345,10 +2352,10 @@ class UpdateSavedQueryRequest(proto.Message): Required. The list of fields to update. """ - saved_query: 'SavedQuery' = proto.Field( + saved_query: "SavedQuery" = proto.Field( proto.MESSAGE, number=1, - message='SavedQuery', + message="SavedQuery", ) update_mask: field_mask_pb2.FieldMask = proto.Field( proto.MESSAGE, @@ -2400,6 +2407,7 @@ class AnalyzeMoveRequest(proto.Message): should be included in the analysis response. If unspecified, the default view is FULL. """ + class AnalysisView(proto.Enum): r"""View enum for supporting partial analysis responses. @@ -2445,10 +2453,10 @@ class AnalyzeMoveResponse(proto.Message): services. """ - move_analysis: MutableSequence['MoveAnalysis'] = proto.RepeatedField( + move_analysis: MutableSequence["MoveAnalysis"] = proto.RepeatedField( proto.MESSAGE, number=1, - message='MoveAnalysis', + message="MoveAnalysis", ) @@ -2482,16 +2490,16 @@ class MoveAnalysis(proto.Message): proto.STRING, number=1, ) - analysis: 'MoveAnalysisResult' = proto.Field( + analysis: "MoveAnalysisResult" = proto.Field( proto.MESSAGE, number=2, - oneof='result', - message='MoveAnalysisResult', + oneof="result", + message="MoveAnalysisResult", ) error: status_pb2.Status = proto.Field( proto.MESSAGE, number=3, - oneof='result', + oneof="result", message=status_pb2.Status, ) @@ -2512,15 +2520,15 @@ class MoveAnalysisResult(proto.Message): but will not block moves at runtime. 
""" - blockers: MutableSequence['MoveImpact'] = proto.RepeatedField( + blockers: MutableSequence["MoveImpact"] = proto.RepeatedField( proto.MESSAGE, number=1, - message='MoveImpact', + message="MoveImpact", ) - warnings: MutableSequence['MoveImpact'] = proto.RepeatedField( + warnings: MutableSequence["MoveImpact"] = proto.RepeatedField( proto.MESSAGE, number=2, - message='MoveImpact', + message="MoveImpact", ) @@ -2688,12 +2696,12 @@ class QueryAssetsRequest(proto.Message): statement: str = proto.Field( proto.STRING, number=2, - oneof='query', + oneof="query", ) job_reference: str = proto.Field( proto.STRING, number=3, - oneof='query', + oneof="query", ) page_size: int = proto.Field( proto.INT32, @@ -2711,19 +2719,19 @@ class QueryAssetsRequest(proto.Message): read_time_window: gca_assets.TimeWindow = proto.Field( proto.MESSAGE, number=7, - oneof='time', + oneof="time", message=gca_assets.TimeWindow, ) read_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=8, - oneof='time', + oneof="time", message=timestamp_pb2.Timestamp, ) - output_config: 'QueryAssetsOutputConfig' = proto.Field( + output_config: "QueryAssetsOutputConfig" = proto.Field( proto.MESSAGE, number=9, - message='QueryAssetsOutputConfig', + message="QueryAssetsOutputConfig", ) @@ -2776,20 +2784,20 @@ class QueryAssetsResponse(proto.Message): error: status_pb2.Status = proto.Field( proto.MESSAGE, number=3, - oneof='response', + oneof="response", message=status_pb2.Status, ) - query_result: 'QueryResult' = proto.Field( + query_result: "QueryResult" = proto.Field( proto.MESSAGE, number=4, - oneof='response', - message='QueryResult', + oneof="response", + message="QueryResult", ) - output_config: 'QueryAssetsOutputConfig' = proto.Field( + output_config: "QueryAssetsOutputConfig" = proto.Field( proto.MESSAGE, number=5, - oneof='response', - message='QueryAssetsOutputConfig', + oneof="response", + message="QueryAssetsOutputConfig", ) @@ -2821,10 +2829,10 @@ def raw_page(self): number=1, message=struct_pb2.Struct, ) - schema: 'TableSchema' = proto.Field( + schema: "TableSchema" = proto.Field( proto.MESSAGE, number=2, - message='TableSchema', + message="TableSchema", ) next_page_token: str = proto.Field( proto.STRING, @@ -2844,10 +2852,10 @@ class TableSchema(proto.Message): Describes the fields in a table. 
""" - fields: MutableSequence['TableFieldSchema'] = proto.RepeatedField( + fields: MutableSequence["TableFieldSchema"] = proto.RepeatedField( proto.MESSAGE, number=1, - message='TableFieldSchema', + message="TableFieldSchema", ) @@ -2898,10 +2906,10 @@ class TableFieldSchema(proto.Message): proto.STRING, number=3, ) - fields: MutableSequence['TableFieldSchema'] = proto.RepeatedField( + fields: MutableSequence["TableFieldSchema"] = proto.RepeatedField( proto.MESSAGE, number=4, - message='TableFieldSchema', + message="TableFieldSchema", ) @@ -3020,10 +3028,12 @@ class PolicyInfo(proto.Message): proto.STRING, number=1, ) - policies: MutableSequence['BatchGetEffectiveIamPoliciesResponse.EffectiveIamPolicy.PolicyInfo'] = proto.RepeatedField( + policies: MutableSequence[ + "BatchGetEffectiveIamPoliciesResponse.EffectiveIamPolicy.PolicyInfo" + ] = proto.RepeatedField( proto.MESSAGE, number=2, - message='BatchGetEffectiveIamPoliciesResponse.EffectiveIamPolicy.PolicyInfo', + message="BatchGetEffectiveIamPoliciesResponse.EffectiveIamPolicy.PolicyInfo", ) policy_results: MutableSequence[EffectiveIamPolicy] = proto.RepeatedField( @@ -3131,26 +3141,26 @@ class StringValues(proto.Message): number=2, ) - values: 'AnalyzerOrgPolicy.Rule.StringValues' = proto.Field( + values: "AnalyzerOrgPolicy.Rule.StringValues" = proto.Field( proto.MESSAGE, number=3, - oneof='kind', - message='AnalyzerOrgPolicy.Rule.StringValues', + oneof="kind", + message="AnalyzerOrgPolicy.Rule.StringValues", ) allow_all: bool = proto.Field( proto.BOOL, number=4, - oneof='kind', + oneof="kind", ) deny_all: bool = proto.Field( proto.BOOL, number=5, - oneof='kind', + oneof="kind", ) enforce: bool = proto.Field( proto.BOOL, number=6, - oneof='kind', + oneof="kind", ) condition: expr_pb2.Expr = proto.Field( proto.MESSAGE, @@ -3241,6 +3251,7 @@ class Constraint(proto.Message): This field is a member of `oneof`_ ``constraint_type``. """ + class ConstraintDefault(proto.Enum): r"""Specifies the default behavior in the absence of any ``Policy`` for the ``Constraint``. This must not be @@ -3313,22 +3324,24 @@ class BooleanConstraint(proto.Message): proto.STRING, number=3, ) - constraint_default: 'AnalyzerOrgPolicyConstraint.Constraint.ConstraintDefault' = proto.Field( + constraint_default: "AnalyzerOrgPolicyConstraint.Constraint.ConstraintDefault" = proto.Field( proto.ENUM, number=4, - enum='AnalyzerOrgPolicyConstraint.Constraint.ConstraintDefault', + enum="AnalyzerOrgPolicyConstraint.Constraint.ConstraintDefault", ) - list_constraint: 'AnalyzerOrgPolicyConstraint.Constraint.ListConstraint' = proto.Field( - proto.MESSAGE, - number=5, - oneof='constraint_type', - message='AnalyzerOrgPolicyConstraint.Constraint.ListConstraint', + list_constraint: "AnalyzerOrgPolicyConstraint.Constraint.ListConstraint" = ( + proto.Field( + proto.MESSAGE, + number=5, + oneof="constraint_type", + message="AnalyzerOrgPolicyConstraint.Constraint.ListConstraint", + ) ) - boolean_constraint: 'AnalyzerOrgPolicyConstraint.Constraint.BooleanConstraint' = proto.Field( + boolean_constraint: "AnalyzerOrgPolicyConstraint.Constraint.BooleanConstraint" = proto.Field( proto.MESSAGE, number=6, - oneof='constraint_type', - message='AnalyzerOrgPolicyConstraint.Constraint.BooleanConstraint', + oneof="constraint_type", + message="AnalyzerOrgPolicyConstraint.Constraint.BooleanConstraint", ) class CustomConstraint(proto.Message): @@ -3363,6 +3376,7 @@ class CustomConstraint(proto.Message): Detailed information about this custom policy constraint. 
""" + class MethodType(proto.Enum): r"""The operation in which this constraint will be applied. For example: If the constraint applies only when create VMs, the method_types @@ -3410,19 +3424,23 @@ class ActionType(proto.Enum): proto.STRING, number=2, ) - method_types: MutableSequence['AnalyzerOrgPolicyConstraint.CustomConstraint.MethodType'] = proto.RepeatedField( + method_types: MutableSequence[ + "AnalyzerOrgPolicyConstraint.CustomConstraint.MethodType" + ] = proto.RepeatedField( proto.ENUM, number=3, - enum='AnalyzerOrgPolicyConstraint.CustomConstraint.MethodType', + enum="AnalyzerOrgPolicyConstraint.CustomConstraint.MethodType", ) condition: str = proto.Field( proto.STRING, number=4, ) - action_type: 'AnalyzerOrgPolicyConstraint.CustomConstraint.ActionType' = proto.Field( - proto.ENUM, - number=5, - enum='AnalyzerOrgPolicyConstraint.CustomConstraint.ActionType', + action_type: "AnalyzerOrgPolicyConstraint.CustomConstraint.ActionType" = ( + proto.Field( + proto.ENUM, + number=5, + enum="AnalyzerOrgPolicyConstraint.CustomConstraint.ActionType", + ) ) display_name: str = proto.Field( proto.STRING, @@ -3436,13 +3454,13 @@ class ActionType(proto.Enum): google_defined_constraint: Constraint = proto.Field( proto.MESSAGE, number=1, - oneof='constraint_definition', + oneof="constraint_definition", message=Constraint, ) custom_constraint: CustomConstraint = proto.Field( proto.MESSAGE, number=2, - oneof='constraint_definition', + oneof="constraint_definition", message=CustomConstraint, ) @@ -3549,15 +3567,15 @@ class OrgPolicyResult(proto.Message): also appear in the list. """ - consolidated_policy: 'AnalyzerOrgPolicy' = proto.Field( + consolidated_policy: "AnalyzerOrgPolicy" = proto.Field( proto.MESSAGE, number=1, - message='AnalyzerOrgPolicy', + message="AnalyzerOrgPolicy", ) - policy_bundle: MutableSequence['AnalyzerOrgPolicy'] = proto.RepeatedField( + policy_bundle: MutableSequence["AnalyzerOrgPolicy"] = proto.RepeatedField( proto.MESSAGE, number=2, - message='AnalyzerOrgPolicy', + message="AnalyzerOrgPolicy", ) @property @@ -3569,10 +3587,10 @@ def raw_page(self): number=1, message=OrgPolicyResult, ) - constraint: 'AnalyzerOrgPolicyConstraint' = proto.Field( + constraint: "AnalyzerOrgPolicyConstraint" = proto.Field( proto.MESSAGE, number=2, - message='AnalyzerOrgPolicyConstraint', + message="AnalyzerOrgPolicyConstraint", ) next_page_token: str = proto.Field( proto.STRING, @@ -3699,15 +3717,15 @@ class GovernedContainer(proto.Message): proto.STRING, number=2, ) - consolidated_policy: 'AnalyzerOrgPolicy' = proto.Field( + consolidated_policy: "AnalyzerOrgPolicy" = proto.Field( proto.MESSAGE, number=3, - message='AnalyzerOrgPolicy', + message="AnalyzerOrgPolicy", ) - policy_bundle: MutableSequence['AnalyzerOrgPolicy'] = proto.RepeatedField( + policy_bundle: MutableSequence["AnalyzerOrgPolicy"] = proto.RepeatedField( proto.MESSAGE, number=4, - message='AnalyzerOrgPolicy', + message="AnalyzerOrgPolicy", ) @property @@ -3719,10 +3737,10 @@ def raw_page(self): number=1, message=GovernedContainer, ) - constraint: 'AnalyzerOrgPolicyConstraint' = proto.Field( + constraint: "AnalyzerOrgPolicyConstraint" = proto.Field( proto.MESSAGE, number=2, - message='AnalyzerOrgPolicyConstraint', + message="AnalyzerOrgPolicyConstraint", ) next_page_token: str = proto.Field( proto.STRING, @@ -3963,27 +3981,29 @@ class GovernedAsset(proto.Message): also appear in the list. 
""" - governed_resource: 'AnalyzeOrgPolicyGovernedAssetsResponse.GovernedResource' = proto.Field( - proto.MESSAGE, - number=1, - oneof='governed_asset', - message='AnalyzeOrgPolicyGovernedAssetsResponse.GovernedResource', + governed_resource: "AnalyzeOrgPolicyGovernedAssetsResponse.GovernedResource" = ( + proto.Field( + proto.MESSAGE, + number=1, + oneof="governed_asset", + message="AnalyzeOrgPolicyGovernedAssetsResponse.GovernedResource", + ) ) - governed_iam_policy: 'AnalyzeOrgPolicyGovernedAssetsResponse.GovernedIamPolicy' = proto.Field( + governed_iam_policy: "AnalyzeOrgPolicyGovernedAssetsResponse.GovernedIamPolicy" = proto.Field( proto.MESSAGE, number=2, - oneof='governed_asset', - message='AnalyzeOrgPolicyGovernedAssetsResponse.GovernedIamPolicy', + oneof="governed_asset", + message="AnalyzeOrgPolicyGovernedAssetsResponse.GovernedIamPolicy", ) - consolidated_policy: 'AnalyzerOrgPolicy' = proto.Field( + consolidated_policy: "AnalyzerOrgPolicy" = proto.Field( proto.MESSAGE, number=3, - message='AnalyzerOrgPolicy', + message="AnalyzerOrgPolicy", ) - policy_bundle: MutableSequence['AnalyzerOrgPolicy'] = proto.RepeatedField( + policy_bundle: MutableSequence["AnalyzerOrgPolicy"] = proto.RepeatedField( proto.MESSAGE, number=4, - message='AnalyzerOrgPolicy', + message="AnalyzerOrgPolicy", ) @property @@ -3995,10 +4015,10 @@ def raw_page(self): number=1, message=GovernedAsset, ) - constraint: 'AnalyzerOrgPolicyConstraint' = proto.Field( + constraint: "AnalyzerOrgPolicyConstraint" = proto.Field( proto.MESSAGE, number=2, - message='AnalyzerOrgPolicyConstraint', + message="AnalyzerOrgPolicyConstraint", ) next_page_token: str = proto.Field( proto.STRING, diff --git a/tests/integration/goldens/asset/google/cloud/asset_v1/types/assets.py b/tests/integration/goldens/asset/google/cloud/asset_v1/types/assets.py index d2097a07a4..bbf535dfcb 100755 --- a/tests/integration/goldens/asset/google/cloud/asset_v1/types/assets.py +++ b/tests/integration/goldens/asset/google/cloud/asset_v1/types/assets.py @@ -17,38 +17,38 @@ from typing import MutableMapping, MutableSequence -import proto # type: ignore - from google.cloud.orgpolicy.v1 import orgpolicy_pb2 # type: ignore from google.cloud.osconfig.v1 import inventory_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore from google.identity.accesscontextmanager.v1 import access_level_pb2 # type: ignore from google.identity.accesscontextmanager.v1 import access_policy_pb2 # type: ignore -from google.identity.accesscontextmanager.v1 import service_perimeter_pb2 # type: ignore +from google.identity.accesscontextmanager.v1 import ( + service_perimeter_pb2, +) # type: ignore from google.protobuf import struct_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore from google.rpc import code_pb2 # type: ignore - +import proto # type: ignore __protobuf__ = proto.module( - package='google.cloud.asset.v1', + package="google.cloud.asset.v1", manifest={ - 'TemporalAsset', - 'TimeWindow', - 'Asset', - 'Resource', - 'RelatedAssets', - 'RelationshipAttributes', - 'RelatedAsset', - 'ResourceSearchResult', - 'VersionedResource', - 'AttachedResource', - 'RelatedResources', - 'RelatedResource', - 'IamPolicySearchResult', - 'IamPolicyAnalysisState', - 'ConditionEvaluation', - 'IamPolicyAnalysisResult', + "TemporalAsset", + "TimeWindow", + "Asset", + "Resource", + "RelatedAssets", + "RelationshipAttributes", + "RelatedAsset", + "ResourceSearchResult", + "VersionedResource", + "AttachedResource", + "RelatedResources", + "RelatedResource", + 
"IamPolicySearchResult", + "IamPolicyAnalysisState", + "ConditionEvaluation", + "IamPolicyAnalysisResult", }, ) @@ -73,6 +73,7 @@ class TemporalAsset(proto.Message): PRESENT. Currently this is only set for responses in Real-Time Feed. """ + class PriorAssetState(proto.Enum): r"""State of prior asset. @@ -94,29 +95,29 @@ class PriorAssetState(proto.Enum): DOES_NOT_EXIST = 3 DELETED = 4 - window: 'TimeWindow' = proto.Field( + window: "TimeWindow" = proto.Field( proto.MESSAGE, number=1, - message='TimeWindow', + message="TimeWindow", ) deleted: bool = proto.Field( proto.BOOL, number=2, ) - asset: 'Asset' = proto.Field( + asset: "Asset" = proto.Field( proto.MESSAGE, number=3, - message='Asset', + message="Asset", ) prior_asset_state: PriorAssetState = proto.Field( proto.ENUM, number=4, enum=PriorAssetState, ) - prior_asset: 'Asset' = proto.Field( + prior_asset: "Asset" = proto.Field( proto.MESSAGE, number=5, - message='Asset', + message="Asset", ) @@ -253,10 +254,10 @@ class Asset(proto.Message): proto.STRING, number=2, ) - resource: 'Resource' = proto.Field( + resource: "Resource" = proto.Field( proto.MESSAGE, number=3, - message='Resource', + message="Resource", ) iam_policy: policy_pb2.Policy = proto.Field( proto.MESSAGE, @@ -271,19 +272,19 @@ class Asset(proto.Message): access_policy: access_policy_pb2.AccessPolicy = proto.Field( proto.MESSAGE, number=7, - oneof='access_context_policy', + oneof="access_context_policy", message=access_policy_pb2.AccessPolicy, ) access_level: access_level_pb2.AccessLevel = proto.Field( proto.MESSAGE, number=8, - oneof='access_context_policy', + oneof="access_context_policy", message=access_level_pb2.AccessLevel, ) service_perimeter: service_perimeter_pb2.ServicePerimeter = proto.Field( proto.MESSAGE, number=9, - oneof='access_context_policy', + oneof="access_context_policy", message=service_perimeter_pb2.ServicePerimeter, ) os_inventory: inventory_pb2.Inventory = proto.Field( @@ -291,15 +292,15 @@ class Asset(proto.Message): number=12, message=inventory_pb2.Inventory, ) - related_assets: 'RelatedAssets' = proto.Field( + related_assets: "RelatedAssets" = proto.Field( proto.MESSAGE, number=13, - message='RelatedAssets', + message="RelatedAssets", ) - related_asset: 'RelatedAsset' = proto.Field( + related_asset: "RelatedAsset" = proto.Field( proto.MESSAGE, number=15, - message='RelatedAsset', + message="RelatedAsset", ) ancestors: MutableSequence[str] = proto.RepeatedField( proto.STRING, @@ -400,15 +401,15 @@ class RelatedAssets(proto.Message): The peer resources of the relationship. 
""" - relationship_attributes: 'RelationshipAttributes' = proto.Field( + relationship_attributes: "RelationshipAttributes" = proto.Field( proto.MESSAGE, number=1, - message='RelationshipAttributes', + message="RelationshipAttributes", ) - assets: MutableSequence['RelatedAsset'] = proto.RepeatedField( + assets: MutableSequence["RelatedAsset"] = proto.RepeatedField( proto.MESSAGE, number=2, - message='RelatedAsset', + message="RelatedAsset", ) @@ -888,21 +889,21 @@ class ResourceSearchResult(proto.Message): proto.STRING, number=19, ) - versioned_resources: MutableSequence['VersionedResource'] = proto.RepeatedField( + versioned_resources: MutableSequence["VersionedResource"] = proto.RepeatedField( proto.MESSAGE, number=16, - message='VersionedResource', + message="VersionedResource", ) - attached_resources: MutableSequence['AttachedResource'] = proto.RepeatedField( + attached_resources: MutableSequence["AttachedResource"] = proto.RepeatedField( proto.MESSAGE, number=20, - message='AttachedResource', + message="AttachedResource", ) - relationships: MutableMapping[str, 'RelatedResources'] = proto.MapField( + relationships: MutableMapping[str, "RelatedResources"] = proto.MapField( proto.STRING, proto.MESSAGE, number=21, - message='RelatedResources', + message="RelatedResources", ) tag_keys: MutableSequence[str] = proto.RepeatedField( proto.STRING, @@ -985,10 +986,10 @@ class AttachedResource(proto.Message): proto.STRING, number=1, ) - versioned_resources: MutableSequence['VersionedResource'] = proto.RepeatedField( + versioned_resources: MutableSequence["VersionedResource"] = proto.RepeatedField( proto.MESSAGE, number=3, - message='VersionedResource', + message="VersionedResource", ) @@ -1001,10 +1002,10 @@ class RelatedResources(proto.Message): resource. """ - related_resources: MutableSequence['RelatedResource'] = proto.RepeatedField( + related_resources: MutableSequence["RelatedResource"] = proto.RepeatedField( proto.MESSAGE, number=1, - message='RelatedResource', + message="RelatedResource", ) @@ -1142,11 +1143,13 @@ class Permissions(proto.Message): number=1, ) - matched_permissions: MutableMapping[str, 'IamPolicySearchResult.Explanation.Permissions'] = proto.MapField( + matched_permissions: MutableMapping[ + str, "IamPolicySearchResult.Explanation.Permissions" + ] = proto.MapField( proto.STRING, proto.MESSAGE, number=1, - message='IamPolicySearchResult.Explanation.Permissions', + message="IamPolicySearchResult.Explanation.Permissions", ) resource: str = proto.Field( @@ -1219,6 +1222,7 @@ class ConditionEvaluation(proto.Message): evaluation_value (google.cloud.asset_v1.types.ConditionEvaluation.EvaluationValue): The evaluation result. """ + class EvaluationValue(proto.Enum): r"""Value of this expression. 
@@ -1290,10 +1294,10 @@ class Resource(proto.Message): proto.STRING, number=1, ) - analysis_state: 'IamPolicyAnalysisState' = proto.Field( + analysis_state: "IamPolicyAnalysisState" = proto.Field( proto.MESSAGE, number=2, - message='IamPolicyAnalysisState', + message="IamPolicyAnalysisState", ) class Access(proto.Message): @@ -1322,17 +1326,17 @@ class Access(proto.Message): role: str = proto.Field( proto.STRING, number=1, - oneof='oneof_access', + oneof="oneof_access", ) permission: str = proto.Field( proto.STRING, number=2, - oneof='oneof_access', + oneof="oneof_access", ) - analysis_state: 'IamPolicyAnalysisState' = proto.Field( + analysis_state: "IamPolicyAnalysisState" = proto.Field( proto.MESSAGE, number=3, - message='IamPolicyAnalysisState', + message="IamPolicyAnalysisState", ) class Identity(proto.Message): @@ -1360,10 +1364,10 @@ class Identity(proto.Message): proto.STRING, number=1, ) - analysis_state: 'IamPolicyAnalysisState' = proto.Field( + analysis_state: "IamPolicyAnalysisState" = proto.Field( proto.MESSAGE, number=2, - message='IamPolicyAnalysisState', + message="IamPolicyAnalysisState", ) class Edge(proto.Message): @@ -1437,25 +1441,31 @@ class AccessControlList(proto.Message): defined in the above IAM policy binding. """ - resources: MutableSequence['IamPolicyAnalysisResult.Resource'] = proto.RepeatedField( + resources: MutableSequence[ + "IamPolicyAnalysisResult.Resource" + ] = proto.RepeatedField( proto.MESSAGE, number=1, - message='IamPolicyAnalysisResult.Resource', + message="IamPolicyAnalysisResult.Resource", ) - accesses: MutableSequence['IamPolicyAnalysisResult.Access'] = proto.RepeatedField( + accesses: MutableSequence[ + "IamPolicyAnalysisResult.Access" + ] = proto.RepeatedField( proto.MESSAGE, number=2, - message='IamPolicyAnalysisResult.Access', + message="IamPolicyAnalysisResult.Access", ) - resource_edges: MutableSequence['IamPolicyAnalysisResult.Edge'] = proto.RepeatedField( + resource_edges: MutableSequence[ + "IamPolicyAnalysisResult.Edge" + ] = proto.RepeatedField( proto.MESSAGE, number=3, - message='IamPolicyAnalysisResult.Edge', + message="IamPolicyAnalysisResult.Edge", ) - condition_evaluation: 'ConditionEvaluation' = proto.Field( + condition_evaluation: "ConditionEvaluation" = proto.Field( proto.MESSAGE, number=4, - message='ConditionEvaluation', + message="ConditionEvaluation", ) class IdentityList(proto.Message): @@ -1483,15 +1493,19 @@ class IdentityList(proto.Message): enabled in request. 
""" - identities: MutableSequence['IamPolicyAnalysisResult.Identity'] = proto.RepeatedField( + identities: MutableSequence[ + "IamPolicyAnalysisResult.Identity" + ] = proto.RepeatedField( proto.MESSAGE, number=1, - message='IamPolicyAnalysisResult.Identity', + message="IamPolicyAnalysisResult.Identity", ) - group_edges: MutableSequence['IamPolicyAnalysisResult.Edge'] = proto.RepeatedField( + group_edges: MutableSequence[ + "IamPolicyAnalysisResult.Edge" + ] = proto.RepeatedField( proto.MESSAGE, number=2, - message='IamPolicyAnalysisResult.Edge', + message="IamPolicyAnalysisResult.Edge", ) attached_resource_full_name: str = proto.Field( diff --git a/tests/integration/goldens/asset/noxfile.py b/tests/integration/goldens/asset/noxfile.py index 487acba725..4ab620d505 100755 --- a/tests/integration/goldens/asset/noxfile.py +++ b/tests/integration/goldens/asset/noxfile.py @@ -17,7 +17,6 @@ import pathlib import re import shutil - from typing import Dict, List import warnings @@ -235,7 +234,12 @@ def unit(session, protobuf_implementation): # TODO(https://github.com/googleapis/gapic-generator-python/issues/2388): # Remove this check once support for Protobuf 3.x is dropped. - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13", "3.14"): + if protobuf_implementation == "cpp" and session.python in ( + "3.11", + "3.12", + "3.13", + "3.14", + ): session.skip("cpp implementation is not supported in python 3.11+") constraints_path = str( @@ -377,8 +381,10 @@ def docs(session): "-W", # warnings as errors "-T", # show full traceback on exception "-N", # no colors - "-b", "html", # builder - "-d", os.path.join("docs", "_build", "doctrees", ""), # cache directory + "-b", + "html", # builder + "-d", + os.path.join("docs", "_build", "doctrees", ""), # cache directory # paths to build: os.path.join("docs", ""), os.path.join("docs", "_build", "html", ""), @@ -446,7 +452,12 @@ def prerelease_deps(session, protobuf_implementation): # TODO(https://github.com/googleapis/gapic-generator-python/issues/2388): # Remove this check once support for Protobuf 3.x is dropped. 
- if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13", "3.14"): + if protobuf_implementation == "cpp" and session.python in ( + "3.11", + "3.12", + "3.13", + "3.14", + ): session.skip("cpp implementation is not supported in python 3.11+") # Install all dependencies diff --git a/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_async.py b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_async.py index 972a326903..0366d7e4fb 100755 --- a/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_async.py +++ b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_async.py @@ -52,4 +52,5 @@ async def sample_analyze_iam_policy(): # Handle the response print(response) + # [END cloudasset_v1_generated_AssetService_AnalyzeIamPolicy_async] diff --git a/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_longrunning_async.py b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_longrunning_async.py index 677d79556e..05ba0fdb24 100755 --- a/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_longrunning_async.py +++ b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_longrunning_async.py @@ -60,4 +60,5 @@ async def sample_analyze_iam_policy_longrunning(): # Handle the response print(response) + # [END cloudasset_v1_generated_AssetService_AnalyzeIamPolicyLongrunning_async] diff --git a/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_longrunning_sync.py b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_longrunning_sync.py index 603d16e9c0..f11eb04045 100755 --- a/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_longrunning_sync.py +++ b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_longrunning_sync.py @@ -60,4 +60,5 @@ def sample_analyze_iam_policy_longrunning(): # Handle the response print(response) + # [END cloudasset_v1_generated_AssetService_AnalyzeIamPolicyLongrunning_sync] diff --git a/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_sync.py b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_sync.py index 2e62398082..f7dda0c2a1 100755 --- a/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_sync.py +++ b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_sync.py @@ -52,4 +52,5 @@ def sample_analyze_iam_policy(): # Handle the response print(response) + # [END cloudasset_v1_generated_AssetService_AnalyzeIamPolicy_sync] diff --git a/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_move_async.py 
b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_move_async.py index 1a09c54644..dbaf7a43f3 100755 --- a/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_move_async.py +++ b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_move_async.py @@ -50,4 +50,5 @@ async def sample_analyze_move(): # Handle the response print(response) + # [END cloudasset_v1_generated_AssetService_AnalyzeMove_async] diff --git a/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_move_sync.py b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_move_sync.py index e1c4d69361..9b99769d15 100755 --- a/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_move_sync.py +++ b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_move_sync.py @@ -50,4 +50,5 @@ def sample_analyze_move(): # Handle the response print(response) + # [END cloudasset_v1_generated_AssetService_AnalyzeMove_sync] diff --git a/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policies_async.py b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policies_async.py index b7c1b35df7..1051e32e3e 100755 --- a/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policies_async.py +++ b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policies_async.py @@ -51,4 +51,5 @@ async def sample_analyze_org_policies(): async for response in page_result: print(response) + # [END cloudasset_v1_generated_AssetService_AnalyzeOrgPolicies_async] diff --git a/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policies_sync.py b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policies_sync.py index 315543e7d5..b738f76fd5 100755 --- a/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policies_sync.py +++ b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policies_sync.py @@ -51,4 +51,5 @@ def sample_analyze_org_policies(): for response in page_result: print(response) + # [END cloudasset_v1_generated_AssetService_AnalyzeOrgPolicies_sync] diff --git a/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policy_governed_assets_async.py b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policy_governed_assets_async.py index 8ed6cc039b..3b2eac7cad 100755 --- a/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policy_governed_assets_async.py +++ b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policy_governed_assets_async.py @@ -51,4 +51,5 @@ async def sample_analyze_org_policy_governed_assets(): async for response in page_result: print(response) + # [END cloudasset_v1_generated_AssetService_AnalyzeOrgPolicyGovernedAssets_async] diff 
--git a/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policy_governed_assets_sync.py b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policy_governed_assets_sync.py index a5e3393f98..e6c0a8b405 100755 --- a/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policy_governed_assets_sync.py +++ b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policy_governed_assets_sync.py @@ -51,4 +51,5 @@ def sample_analyze_org_policy_governed_assets(): for response in page_result: print(response) + # [END cloudasset_v1_generated_AssetService_AnalyzeOrgPolicyGovernedAssets_sync] diff --git a/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policy_governed_containers_async.py b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policy_governed_containers_async.py index 2158693027..248661ce34 100755 --- a/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policy_governed_containers_async.py +++ b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policy_governed_containers_async.py @@ -51,4 +51,5 @@ async def sample_analyze_org_policy_governed_containers(): async for response in page_result: print(response) + # [END cloudasset_v1_generated_AssetService_AnalyzeOrgPolicyGovernedContainers_async] diff --git a/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policy_governed_containers_sync.py b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policy_governed_containers_sync.py index 4f52cbdb91..0f351b0162 100755 --- a/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policy_governed_containers_sync.py +++ b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policy_governed_containers_sync.py @@ -51,4 +51,5 @@ def sample_analyze_org_policy_governed_containers(): for response in page_result: print(response) + # [END cloudasset_v1_generated_AssetService_AnalyzeOrgPolicyGovernedContainers_sync] diff --git a/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_assets_history_async.py b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_assets_history_async.py index 185ec5b550..2764fd7a51 100755 --- a/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_assets_history_async.py +++ b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_assets_history_async.py @@ -49,4 +49,5 @@ async def sample_batch_get_assets_history(): # Handle the response print(response) + # [END cloudasset_v1_generated_AssetService_BatchGetAssetsHistory_async] diff --git a/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_assets_history_sync.py b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_assets_history_sync.py index 
a4dbf5d898..f02b9bdbf9 100755 --- a/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_assets_history_sync.py +++ b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_assets_history_sync.py @@ -49,4 +49,5 @@ def sample_batch_get_assets_history(): # Handle the response print(response) + # [END cloudasset_v1_generated_AssetService_BatchGetAssetsHistory_sync] diff --git a/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_effective_iam_policies_async.py b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_effective_iam_policies_async.py index 2a2112e96b..d1bf79824e 100755 --- a/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_effective_iam_policies_async.py +++ b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_effective_iam_policies_async.py @@ -41,7 +41,7 @@ async def sample_batch_get_effective_iam_policies(): # Initialize request argument(s) request = asset_v1.BatchGetEffectiveIamPoliciesRequest( scope="scope_value", - names=['names_value1', 'names_value2'], + names=["names_value1", "names_value2"], ) # Make the request @@ -50,4 +50,5 @@ async def sample_batch_get_effective_iam_policies(): # Handle the response print(response) + # [END cloudasset_v1_generated_AssetService_BatchGetEffectiveIamPolicies_async] diff --git a/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_effective_iam_policies_sync.py b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_effective_iam_policies_sync.py index 03874bb741..56409aeefe 100755 --- a/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_effective_iam_policies_sync.py +++ b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_effective_iam_policies_sync.py @@ -41,7 +41,7 @@ def sample_batch_get_effective_iam_policies(): # Initialize request argument(s) request = asset_v1.BatchGetEffectiveIamPoliciesRequest( scope="scope_value", - names=['names_value1', 'names_value2'], + names=["names_value1", "names_value2"], ) # Make the request @@ -50,4 +50,5 @@ def sample_batch_get_effective_iam_policies(): # Handle the response print(response) + # [END cloudasset_v1_generated_AssetService_BatchGetEffectiveIamPolicies_sync] diff --git a/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_create_feed_async.py b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_create_feed_async.py index 0bcd5fb1e5..c57f865ef6 100755 --- a/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_create_feed_async.py +++ b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_create_feed_async.py @@ -54,4 +54,5 @@ async def sample_create_feed(): # Handle the response print(response) + # [END cloudasset_v1_generated_AssetService_CreateFeed_async] diff --git a/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_create_feed_sync.py 
b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_create_feed_sync.py index 2f8e112962..bbc4716203 100755 --- a/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_create_feed_sync.py +++ b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_create_feed_sync.py @@ -54,4 +54,5 @@ def sample_create_feed(): # Handle the response print(response) + # [END cloudasset_v1_generated_AssetService_CreateFeed_sync] diff --git a/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_create_saved_query_async.py b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_create_saved_query_async.py index a8b64d40ec..cab2f1d1e9 100755 --- a/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_create_saved_query_async.py +++ b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_create_saved_query_async.py @@ -50,4 +50,5 @@ async def sample_create_saved_query(): # Handle the response print(response) + # [END cloudasset_v1_generated_AssetService_CreateSavedQuery_async] diff --git a/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_create_saved_query_sync.py b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_create_saved_query_sync.py index 53cb726f86..c56a08171a 100755 --- a/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_create_saved_query_sync.py +++ b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_create_saved_query_sync.py @@ -50,4 +50,5 @@ def sample_create_saved_query(): # Handle the response print(response) + # [END cloudasset_v1_generated_AssetService_CreateSavedQuery_sync] diff --git a/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_export_assets_async.py b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_export_assets_async.py index 61934e88f0..69eb824d4f 100755 --- a/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_export_assets_async.py +++ b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_export_assets_async.py @@ -57,4 +57,5 @@ async def sample_export_assets(): # Handle the response print(response) + # [END cloudasset_v1_generated_AssetService_ExportAssets_async] diff --git a/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_export_assets_sync.py b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_export_assets_sync.py index 696b353c37..fbb690ae9c 100755 --- a/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_export_assets_sync.py +++ b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_export_assets_sync.py @@ -57,4 +57,5 @@ def sample_export_assets(): # Handle the response print(response) + # [END cloudasset_v1_generated_AssetService_ExportAssets_sync] diff --git a/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_get_feed_async.py 
b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_get_feed_async.py index 884b7d0034..78cc0df1b6 100755 --- a/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_get_feed_async.py +++ b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_get_feed_async.py @@ -49,4 +49,5 @@ async def sample_get_feed(): # Handle the response print(response) + # [END cloudasset_v1_generated_AssetService_GetFeed_async] diff --git a/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_get_feed_sync.py b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_get_feed_sync.py index 712a533b71..fea5f82707 100755 --- a/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_get_feed_sync.py +++ b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_get_feed_sync.py @@ -49,4 +49,5 @@ def sample_get_feed(): # Handle the response print(response) + # [END cloudasset_v1_generated_AssetService_GetFeed_sync] diff --git a/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_get_saved_query_async.py b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_get_saved_query_async.py index a24420bb5f..9bfa1bf2cc 100755 --- a/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_get_saved_query_async.py +++ b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_get_saved_query_async.py @@ -49,4 +49,5 @@ async def sample_get_saved_query(): # Handle the response print(response) + # [END cloudasset_v1_generated_AssetService_GetSavedQuery_async] diff --git a/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_get_saved_query_sync.py b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_get_saved_query_sync.py index 291b88589c..b2ffea2520 100755 --- a/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_get_saved_query_sync.py +++ b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_get_saved_query_sync.py @@ -49,4 +49,5 @@ def sample_get_saved_query(): # Handle the response print(response) + # [END cloudasset_v1_generated_AssetService_GetSavedQuery_sync] diff --git a/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_assets_async.py b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_assets_async.py index 6afb977deb..a3375bb538 100755 --- a/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_assets_async.py +++ b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_assets_async.py @@ -50,4 +50,5 @@ async def sample_list_assets(): async for response in page_result: print(response) + # [END cloudasset_v1_generated_AssetService_ListAssets_async] diff --git a/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_assets_sync.py b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_assets_sync.py index 
f7fc8801a1..0062409e4c 100755 --- a/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_assets_sync.py +++ b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_assets_sync.py @@ -50,4 +50,5 @@ def sample_list_assets(): for response in page_result: print(response) + # [END cloudasset_v1_generated_AssetService_ListAssets_sync] diff --git a/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_feeds_async.py b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_feeds_async.py index 64284321eb..5b641d54a7 100755 --- a/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_feeds_async.py +++ b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_feeds_async.py @@ -49,4 +49,5 @@ async def sample_list_feeds(): # Handle the response print(response) + # [END cloudasset_v1_generated_AssetService_ListFeeds_async] diff --git a/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_feeds_sync.py b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_feeds_sync.py index b9733d98cd..0de1d2dade 100755 --- a/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_feeds_sync.py +++ b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_feeds_sync.py @@ -49,4 +49,5 @@ def sample_list_feeds(): # Handle the response print(response) + # [END cloudasset_v1_generated_AssetService_ListFeeds_sync] diff --git a/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_saved_queries_async.py b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_saved_queries_async.py index 3d5f2095cb..4ced4cc6d8 100755 --- a/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_saved_queries_async.py +++ b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_saved_queries_async.py @@ -50,4 +50,5 @@ async def sample_list_saved_queries(): async for response in page_result: print(response) + # [END cloudasset_v1_generated_AssetService_ListSavedQueries_async] diff --git a/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_saved_queries_sync.py b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_saved_queries_sync.py index 9c096d9e60..300205d16e 100755 --- a/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_saved_queries_sync.py +++ b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_saved_queries_sync.py @@ -50,4 +50,5 @@ def sample_list_saved_queries(): for response in page_result: print(response) + # [END cloudasset_v1_generated_AssetService_ListSavedQueries_sync] diff --git a/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_query_assets_async.py b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_query_assets_async.py index baaade80cd..3fce0d2ec1 100755 --- 
a/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_query_assets_async.py +++ b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_query_assets_async.py @@ -50,4 +50,5 @@ async def sample_query_assets(): # Handle the response print(response) + # [END cloudasset_v1_generated_AssetService_QueryAssets_async] diff --git a/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_query_assets_sync.py b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_query_assets_sync.py index a52f6818bd..948136fa52 100755 --- a/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_query_assets_sync.py +++ b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_query_assets_sync.py @@ -50,4 +50,5 @@ def sample_query_assets(): # Handle the response print(response) + # [END cloudasset_v1_generated_AssetService_QueryAssets_sync] diff --git a/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_iam_policies_async.py b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_iam_policies_async.py index a62518e665..6f0bc290b7 100755 --- a/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_iam_policies_async.py +++ b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_iam_policies_async.py @@ -50,4 +50,5 @@ async def sample_search_all_iam_policies(): async for response in page_result: print(response) + # [END cloudasset_v1_generated_AssetService_SearchAllIamPolicies_async] diff --git a/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_iam_policies_sync.py b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_iam_policies_sync.py index 71536d46c5..e41d2480ef 100755 --- a/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_iam_policies_sync.py +++ b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_iam_policies_sync.py @@ -50,4 +50,5 @@ def sample_search_all_iam_policies(): for response in page_result: print(response) + # [END cloudasset_v1_generated_AssetService_SearchAllIamPolicies_sync] diff --git a/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_resources_async.py b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_resources_async.py index aa480039bd..adcfe0e985 100755 --- a/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_resources_async.py +++ b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_resources_async.py @@ -50,4 +50,5 @@ async def sample_search_all_resources(): async for response in page_result: print(response) + # [END cloudasset_v1_generated_AssetService_SearchAllResources_async] diff --git a/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_resources_sync.py 
b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_resources_sync.py index 26ea1bffc4..71db7ae3bb 100755 --- a/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_resources_sync.py +++ b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_resources_sync.py @@ -50,4 +50,5 @@ def sample_search_all_resources(): for response in page_result: print(response) + # [END cloudasset_v1_generated_AssetService_SearchAllResources_sync] diff --git a/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_update_feed_async.py b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_update_feed_async.py index 23cc1a6f0d..b04f10e9e0 100755 --- a/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_update_feed_async.py +++ b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_update_feed_async.py @@ -52,4 +52,5 @@ async def sample_update_feed(): # Handle the response print(response) + # [END cloudasset_v1_generated_AssetService_UpdateFeed_async] diff --git a/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_update_feed_sync.py b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_update_feed_sync.py index 36745fa8c9..48c02911cd 100755 --- a/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_update_feed_sync.py +++ b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_update_feed_sync.py @@ -52,4 +52,5 @@ def sample_update_feed(): # Handle the response print(response) + # [END cloudasset_v1_generated_AssetService_UpdateFeed_sync] diff --git a/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_update_saved_query_async.py b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_update_saved_query_async.py index 269098a257..4ac0c5b1f9 100755 --- a/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_update_saved_query_async.py +++ b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_update_saved_query_async.py @@ -39,8 +39,7 @@ async def sample_update_saved_query(): client = asset_v1.AssetServiceAsyncClient() # Initialize request argument(s) - request = asset_v1.UpdateSavedQueryRequest( - ) + request = asset_v1.UpdateSavedQueryRequest() # Make the request response = await client.update_saved_query(request=request) @@ -48,4 +47,5 @@ async def sample_update_saved_query(): # Handle the response print(response) + # [END cloudasset_v1_generated_AssetService_UpdateSavedQuery_async] diff --git a/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_update_saved_query_sync.py b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_update_saved_query_sync.py index fb8864825e..5e95a9d876 100755 --- a/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_update_saved_query_sync.py +++ b/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_update_saved_query_sync.py @@ -39,8 +39,7 
@@ def sample_update_saved_query(): client = asset_v1.AssetServiceClient() # Initialize request argument(s) - request = asset_v1.UpdateSavedQueryRequest( - ) + request = asset_v1.UpdateSavedQueryRequest() # Make the request response = client.update_saved_query(request=request) @@ -48,4 +47,5 @@ def sample_update_saved_query(): # Handle the response print(response) + # [END cloudasset_v1_generated_AssetService_UpdateSavedQuery_sync] diff --git a/tests/integration/goldens/asset/setup.py b/tests/integration/goldens/asset/setup.py index db0a03c590..9b465e1080 100755 --- a/tests/integration/goldens/asset/setup.py +++ b/tests/integration/goldens/asset/setup.py @@ -17,20 +17,20 @@ import os import re -import setuptools # type: ignore +import setuptools # type: ignore package_root = os.path.abspath(os.path.dirname(__file__)) -name = 'google-cloud-asset' +name = "google-cloud-asset" description = "Google Cloud Asset API client library" version = None -with open(os.path.join(package_root, 'google/cloud/asset/gapic_version.py')) as fp: +with open(os.path.join(package_root, "google/cloud/asset/gapic_version.py")) as fp: version_candidates = re.findall(r"(?<=\")\d+.\d+.\d+(?=\")", fp.read()) - assert (len(version_candidates) == 1) + assert len(version_candidates) == 1 version = version_candidates[0] if version[0] == "0": @@ -52,8 +52,7 @@ "google-cloud-os-config >= 1.0.0, <2.0.0", "grpc-google-iam-v1 >= 0.14.0, <1.0.0", ] -extras = { -} +extras = {} url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-asset" package_root = os.path.abspath(os.path.dirname(__file__)) diff --git a/tests/integration/goldens/asset/tests/__init__.py b/tests/integration/goldens/asset/tests/__init__.py index 191773d557..cbf94b283c 100755 --- a/tests/integration/goldens/asset/tests/__init__.py +++ b/tests/integration/goldens/asset/tests/__init__.py @@ -1,4 +1,3 @@ - # -*- coding: utf-8 -*- # Copyright 2025 Google LLC # diff --git a/tests/integration/goldens/asset/tests/unit/__init__.py b/tests/integration/goldens/asset/tests/unit/__init__.py index 191773d557..cbf94b283c 100755 --- a/tests/integration/goldens/asset/tests/unit/__init__.py +++ b/tests/integration/goldens/asset/tests/unit/__init__.py @@ -1,4 +1,3 @@ - # -*- coding: utf-8 -*- # Copyright 2025 Google LLC # diff --git a/tests/integration/goldens/asset/tests/unit/gapic/__init__.py b/tests/integration/goldens/asset/tests/unit/gapic/__init__.py index 191773d557..cbf94b283c 100755 --- a/tests/integration/goldens/asset/tests/unit/gapic/__init__.py +++ b/tests/integration/goldens/asset/tests/unit/gapic/__init__.py @@ -1,4 +1,3 @@ - # -*- coding: utf-8 -*- # Copyright 2025 Google LLC # diff --git a/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/__init__.py b/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/__init__.py index 191773d557..cbf94b283c 100755 --- a/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/__init__.py +++ b/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/__init__.py @@ -1,4 +1,3 @@ - # -*- coding: utf-8 -*- # Copyright 2025 Google LLC # diff --git a/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py b/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py index bd17439996..17bae13533 100755 --- a/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py +++ b/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py @@ -14,6 +14,7 @@ # limitations under the License. 
# import os + # try/except added for compatibility with python < 3.8 try: from unittest import mock @@ -21,56 +22,58 @@ except ImportError: # pragma: NO COVER import mock -import grpc -from grpc.experimental import aio -from collections.abc import Iterable, AsyncIterable -from google.protobuf import json_format +from collections.abc import AsyncIterable, Iterable import json import math -import pytest + from google.api_core import api_core_version -from proto.marshal.rules.dates import DurationRule, TimestampRule +from google.protobuf import json_format +import grpc +from grpc.experimental import aio from proto.marshal.rules import wrappers -from requests import Response -from requests import Request, PreparedRequest +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response from requests.sessions import Session -from google.protobuf import json_format try: from google.auth.aio import credentials as ga_credentials_async + HAS_GOOGLE_AUTH_AIO = True -except ImportError: # pragma: NO COVER +except ImportError: # pragma: NO COVER HAS_GOOGLE_AUTH_AIO = False +from google.api_core import ( + future, + gapic_v1, + grpc_helpers, + grpc_helpers_async, + operation, + operations_v1, + path_template, +) from google.api_core import client_options from google.api_core import exceptions as core_exceptions -from google.api_core import future -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.api_core import operation from google.api_core import operation_async # type: ignore -from google.api_core import operations_v1 -from google.api_core import path_template from google.api_core import retry as retries +import google.auth from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError -from google.cloud.asset_v1.services.asset_service import AssetServiceAsyncClient -from google.cloud.asset_v1.services.asset_service import AssetServiceClient -from google.cloud.asset_v1.services.asset_service import pagers -from google.cloud.asset_v1.services.asset_service import transports -from google.cloud.asset_v1.types import asset_service -from google.cloud.asset_v1.types import assets -from google.longrunning import operations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account from google.protobuf import duration_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore from google.rpc import status_pb2 # type: ignore from google.type import expr_pb2 # type: ignore -import google.auth - +from google.cloud.asset_v1.services.asset_service import ( + AssetServiceAsyncClient, + AssetServiceClient, + pagers, + transports, +) +from google.cloud.asset_v1.types import asset_service, assets CRED_INFO_JSON = { "credential_source": "/path/to/file", @@ -85,9 +88,11 @@ async def mock_async_gen(data, chunk_size=1): chunk = data[i : i + chunk_size] yield chunk.encode("utf-8") + def client_cert_source_callback(): return b"cert bytes", b"key bytes" + # TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. # See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. 
def async_anonymous_credentials(): @@ -95,17 +100,27 @@ def async_anonymous_credentials(): return ga_credentials_async.AnonymousCredentials() return ga_credentials.AnonymousCredentials() + # If default endpoint is localhost, then default mtls endpoint will be the same. # This method modifies the default endpoint so the client can produce a different # mtls endpoint for endpoint testing purposes. def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + # If default endpoint template is localhost, then default mtls endpoint will be the same. # This method modifies the default endpoint template so the client can produce a different # mtls endpoint for endpoint testing purposes. def modify_default_endpoint_template(client): - return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) def test__get_default_mtls_endpoint(): @@ -116,12 +131,24 @@ def test__get_default_mtls_endpoint(): non_googleapi = "api.example.com" assert AssetServiceClient._get_default_mtls_endpoint(None) is None - assert AssetServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert AssetServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert AssetServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert AssetServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert ( + AssetServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + ) + assert ( + AssetServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + AssetServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + AssetServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) assert AssetServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + def test__read_environment_variables(): assert AssetServiceClient._read_environment_variables() == (False, "auto", None) @@ -143,16 +170,24 @@ def test__read_environment_variables(): ) else: assert AssetServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert AssetServiceClient._read_environment_variables() == ( False, - "auto", + "never", None, ) - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - assert AssetServiceClient._read_environment_variables() == (False, "never", None) - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - assert AssetServiceClient._read_environment_variables() == (False, "always", None) + assert AssetServiceClient._read_environment_variables() == ( + False, + "always", + None, + ) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): assert AssetServiceClient._read_environment_variables() == (False, "auto", None) @@ -160,10 +195,17 @@ def test__read_environment_variables(): with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): with pytest.raises(MutualTLSChannelError) as excinfo: 
AssetServiceClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): - assert AssetServiceClient._read_environment_variables() == (False, "auto", "foo.com") + assert AssetServiceClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) def test_use_client_cert_effective(): @@ -172,7 +214,9 @@ def test_use_client_cert_effective(): # the google-auth library supports automatic mTLS and determines that a # client certificate should be used. if hasattr(google.auth.transport.mtls, "should_use_client_cert"): - with mock.patch("google.auth.transport.mtls.should_use_client_cert", return_value=True): + with mock.patch( + "google.auth.transport.mtls.should_use_client_cert", return_value=True + ): assert AssetServiceClient._use_client_cert_effective() is True # Test case 2: Test when `should_use_client_cert` returns False. @@ -180,7 +224,9 @@ def test_use_client_cert_effective(): # the google-auth library supports automatic mTLS and determines that a # client certificate should NOT be used. if hasattr(google.auth.transport.mtls, "should_use_client_cert"): - with mock.patch("google.auth.transport.mtls.should_use_client_cert", return_value=False): + with mock.patch( + "google.auth.transport.mtls.should_use_client_cert", return_value=False + ): assert AssetServiceClient._use_client_cert_effective() is False # Test case 3: Test when `should_use_client_cert` is unavailable and the @@ -192,7 +238,9 @@ def test_use_client_cert_effective(): # Test case 4: Test when `should_use_client_cert` is unavailable and the # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "false". if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"} + ): assert AssetServiceClient._use_client_cert_effective() is False # Test case 5: Test when `should_use_client_cert` is unavailable and the @@ -204,7 +252,9 @@ def test_use_client_cert_effective(): # Test case 6: Test when `should_use_client_cert` is unavailable and the # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "False". if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "False"}): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "False"} + ): assert AssetServiceClient._use_client_cert_effective() is False # Test case 7: Test when `should_use_client_cert` is unavailable and the @@ -216,7 +266,9 @@ def test_use_client_cert_effective(): # Test case 8: Test when `should_use_client_cert` is unavailable and the # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "FALSE". 
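The hunks above repeatedly drive client behavior through environment variables, scoping each setting to a single with-block via mock.patch.dict. A minimal sketch of that pattern follows; read_mtls_mode() is a hypothetical stand-in for the client's _read_environment_variables(), and only the standard library is assumed.

import os
from unittest import mock


def read_mtls_mode():
    # Hypothetical stand-in for the client helper exercised above.
    return os.environ.get("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")


def test_read_mtls_mode_respects_env():
    # patch.dict restores the previous environment when each block exits.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
        assert read_mtls_mode() == "never"
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
        assert read_mtls_mode() == "always"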
if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "FALSE"}): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "FALSE"} + ): assert AssetServiceClient._use_client_cert_effective() is False # Test case 9: Test when `should_use_client_cert` is unavailable and the @@ -231,83 +283,167 @@ def test_use_client_cert_effective(): # The method should raise a ValueError as the environment variable must be either # "true" or "false". if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "unsupported"}): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "unsupported"} + ): with pytest.raises(ValueError): AssetServiceClient._use_client_cert_effective() # Test case 11: Test when `should_use_client_cert` is available and the # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to an invalid value. # The method should return False as the environment variable is set to an invalid value. - if hasattr(google.auth.transport.mtls, "should_use_client_cert"): - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "unsupported"}): + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "unsupported"} + ): assert AssetServiceClient._use_client_cert_effective() is False # Test case 12: Test when `should_use_client_cert` is available and the # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is unset. Also, # the GOOGLE_API_CONFIG environment variable is unset. - if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": ""}): with mock.patch.dict(os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": ""}): assert AssetServiceClient._use_client_cert_effective() is False + def test__get_client_cert_source(): mock_provided_cert_source = mock.Mock() mock_default_cert_source = mock.Mock() assert AssetServiceClient._get_client_cert_source(None, False) is None - assert AssetServiceClient._get_client_cert_source(mock_provided_cert_source, False) is None - assert AssetServiceClient._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source + assert ( + AssetServiceClient._get_client_cert_source(mock_provided_cert_source, False) + is None + ) + assert ( + AssetServiceClient._get_client_cert_source(mock_provided_cert_source, True) + == mock_provided_cert_source + ) - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source): - assert AssetServiceClient._get_client_cert_source(None, True) is mock_default_cert_source - assert AssetServiceClient._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + AssetServiceClient._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + AssetServiceClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is 
mock_provided_cert_source + ) -@mock.patch.object(AssetServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AssetServiceClient)) -@mock.patch.object(AssetServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AssetServiceAsyncClient)) + +@mock.patch.object( + AssetServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(AssetServiceClient), +) +@mock.patch.object( + AssetServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(AssetServiceAsyncClient), +) def test__get_api_endpoint(): api_override = "foo.com" mock_client_cert_source = mock.Mock() default_universe = AssetServiceClient._DEFAULT_UNIVERSE - default_endpoint = AssetServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + default_endpoint = AssetServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) mock_universe = "bar.com" - mock_endpoint = AssetServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) + mock_endpoint = AssetServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) - assert AssetServiceClient._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override - assert AssetServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == AssetServiceClient.DEFAULT_MTLS_ENDPOINT - assert AssetServiceClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint - assert AssetServiceClient._get_api_endpoint(None, None, default_universe, "always") == AssetServiceClient.DEFAULT_MTLS_ENDPOINT - assert AssetServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == AssetServiceClient.DEFAULT_MTLS_ENDPOINT - assert AssetServiceClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint - assert AssetServiceClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint + assert ( + AssetServiceClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + AssetServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == AssetServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + AssetServiceClient._get_api_endpoint(None, None, default_universe, "auto") + == default_endpoint + ) + assert ( + AssetServiceClient._get_api_endpoint(None, None, default_universe, "always") + == AssetServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + AssetServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == AssetServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + AssetServiceClient._get_api_endpoint(None, None, mock_universe, "never") + == mock_endpoint + ) + assert ( + AssetServiceClient._get_api_endpoint(None, None, default_universe, "never") + == default_endpoint + ) with pytest.raises(MutualTLSChannelError) as excinfo: - AssetServiceClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto") - assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com." + AssetServiceClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." 
+ ) def test__get_universe_domain(): client_universe_domain = "foo.com" universe_domain_env = "bar.com" - assert AssetServiceClient._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain - assert AssetServiceClient._get_universe_domain(None, universe_domain_env) == universe_domain_env - assert AssetServiceClient._get_universe_domain(None, None) == AssetServiceClient._DEFAULT_UNIVERSE + assert ( + AssetServiceClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + AssetServiceClient._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + AssetServiceClient._get_universe_domain(None, None) + == AssetServiceClient._DEFAULT_UNIVERSE + ) with pytest.raises(ValueError) as excinfo: AssetServiceClient._get_universe_domain("", None) assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize("error_code,cred_info_json,show_cred_info", [ - (401, CRED_INFO_JSON, True), - (403, CRED_INFO_JSON, True), - (404, CRED_INFO_JSON, True), - (500, CRED_INFO_JSON, False), - (401, None, False), - (403, None, False), - (404, None, False), - (500, None, False) -]) + +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): cred = mock.Mock(["get_cred_info"]) cred.get_cred_info = mock.Mock(return_value=cred_info_json) @@ -323,7 +459,8 @@ def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_in else: assert error.details == ["foo"] -@pytest.mark.parametrize("error_code", [401,403,404,500]) + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): cred = mock.Mock([]) assert not hasattr(cred, "get_cred_info") @@ -336,14 +473,20 @@ def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): client._add_cred_info_for_auth_errors(error) assert error.details == [] -@pytest.mark.parametrize("client_class,transport_name", [ - (AssetServiceClient, "grpc"), - (AssetServiceAsyncClient, "grpc_asyncio"), - (AssetServiceClient, "rest"), -]) + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (AssetServiceClient, "grpc"), + (AssetServiceAsyncClient, "grpc_asyncio"), + (AssetServiceClient, "rest"), + ], +) def test_asset_service_client_from_service_account_info(client_class, transport_name): creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: factory.return_value = creds info = {"valid": True} client = client_class.from_service_account_info(info, transport=transport_name) @@ -351,52 +494,68 @@ def test_asset_service_client_from_service_account_info(client_class, transport_ assert isinstance(client, client_class) assert client.transport._host == ( - 'cloudasset.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://cloudasset.googleapis.com' + "cloudasset.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://cloudasset.googleapis.com" ) 
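The decorators above show the multi-line pytest.mark.parametrize layout the formatter settles on: one tuple per case, trailing commas throughout, each pair expanded into its own test. A small self-contained sketch of the same style; FakeClient is hypothetical and only pytest is assumed.

import pytest


class FakeClient:
    """Hypothetical stand-in for the generated client classes."""

    def __init__(self, transport_name):
        self.transport_name = transport_name


@pytest.mark.parametrize(
    "client_class,transport_name",
    [
        (FakeClient, "grpc"),
        (FakeClient, "grpc_asyncio"),
        (FakeClient, "rest"),
    ],
)
def test_fake_client_transport(client_class, transport_name):
    client = client_class(transport_name)
    assert client.transport_name in ("grpc", "grpc_asyncio", "rest")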
-@pytest.mark.parametrize("transport_class,transport_name", [ - (transports.AssetServiceGrpcTransport, "grpc"), - (transports.AssetServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (transports.AssetServiceRestTransport, "rest"), -]) -def test_asset_service_client_service_account_always_use_jwt(transport_class, transport_name): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.AssetServiceGrpcTransport, "grpc"), + (transports.AssetServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.AssetServiceRestTransport, "rest"), + ], +) +def test_asset_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: creds = service_account.Credentials(None, None, None) transport = transport_class(credentials=creds, always_use_jwt_access=True) use_jwt.assert_called_once_with(True) - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: creds = service_account.Credentials(None, None, None) transport = transport_class(credentials=creds, always_use_jwt_access=False) use_jwt.assert_not_called() -@pytest.mark.parametrize("client_class,transport_name", [ - (AssetServiceClient, "grpc"), - (AssetServiceAsyncClient, "grpc_asyncio"), - (AssetServiceClient, "rest"), -]) +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (AssetServiceClient, "grpc"), + (AssetServiceAsyncClient, "grpc_asyncio"), + (AssetServiceClient, "rest"), + ], +) def test_asset_service_client_from_service_account_file(client_class, transport_name): creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) assert client.transport._credentials == creds assert isinstance(client, client_class) - client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) assert client.transport._credentials == creds assert isinstance(client, client_class) assert client.transport._host == ( - 'cloudasset.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://cloudasset.googleapis.com' + "cloudasset.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://cloudasset.googleapis.com" ) @@ -412,30 +571,45 @@ def test_asset_service_client_get_transport_class(): assert transport == transports.AssetServiceGrpcTransport -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (AssetServiceClient, transports.AssetServiceGrpcTransport, "grpc"), - (AssetServiceAsyncClient, transports.AssetServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (AssetServiceClient, transports.AssetServiceRestTransport, "rest"), -]) -@mock.patch.object(AssetServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", 
modify_default_endpoint_template(AssetServiceClient)) -@mock.patch.object(AssetServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AssetServiceAsyncClient)) -def test_asset_service_client_client_options(client_class, transport_class, transport_name): +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (AssetServiceClient, transports.AssetServiceGrpcTransport, "grpc"), + ( + AssetServiceAsyncClient, + transports.AssetServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (AssetServiceClient, transports.AssetServiceRestTransport, "rest"), + ], +) +@mock.patch.object( + AssetServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(AssetServiceClient), +) +@mock.patch.object( + AssetServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(AssetServiceAsyncClient), +) +def test_asset_service_client_client_options( + client_class, transport_class, transport_name +): # Check that if channel is provided we won't create a new one. - with mock.patch.object(AssetServiceClient, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) + with mock.patch.object(AssetServiceClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) client = client_class(transport=transport) gtc.assert_not_called() # Check that if channel is provided via str we will create a new one. - with mock.patch.object(AssetServiceClient, 'get_transport_class') as gtc: + with mock.patch.object(AssetServiceClient, "get_transport_class") as gtc: client = client_class(transport=transport_name) gtc.assert_called() # Check the case api_endpoint is provided. options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( @@ -453,13 +627,15 @@ def test_asset_service_client_client_options(client_class, transport_class, tran # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is # "never". with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -471,7 +647,7 @@ def test_asset_service_client_client_options(client_class, transport_class, tran # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is # "always". 
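Several expectations above are built by formatting _DEFAULT_ENDPOINT_TEMPLATE with a universe domain. A sketch of that mechanism under an assumed template string (the real template lives on the generated client and may differ):

_ENDPOINT_TEMPLATE = "cloudasset.{UNIVERSE_DOMAIN}"  # assumed shape, for illustration


def endpoint_for(universe_domain="googleapis.com"):
    # Mirrors client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=...).
    return _ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain)


assert endpoint_for() == "cloudasset.googleapis.com"
assert endpoint_for("bar.com") == "cloudasset.bar.com"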
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(transport=transport_name) patched.assert_called_once_with( @@ -491,17 +667,22 @@ def test_asset_service_client_client_options(client_class, transport_class, tran with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): with pytest.raises(MutualTLSChannelError) as excinfo: client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) # Check the case quota_project_id is provided options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id="octopus", @@ -510,48 +691,82 @@ def test_asset_service_client_client_options(client_class, transport_class, tran api_audience=None, ) # Check the case api_endpoint is provided - options = client_options.ClientOptions(api_audience="https://language.googleapis.com") - with mock.patch.object(transport_class, '__init__') as patched: + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, - api_audience="https://language.googleapis.com" - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ - (AssetServiceClient, transports.AssetServiceGrpcTransport, "grpc", "true"), - (AssetServiceAsyncClient, transports.AssetServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"), - (AssetServiceClient, transports.AssetServiceGrpcTransport, "grpc", "false"), - (AssetServiceAsyncClient, transports.AssetServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"), - (AssetServiceClient, transports.AssetServiceRestTransport, "rest", "true"), - (AssetServiceClient, transports.AssetServiceRestTransport, "rest", "false"), -]) -@mock.patch.object(AssetServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AssetServiceClient)) -@mock.patch.object(AssetServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AssetServiceAsyncClient)) + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + 
"client_class,transport_class,transport_name,use_client_cert_env", + [ + (AssetServiceClient, transports.AssetServiceGrpcTransport, "grpc", "true"), + ( + AssetServiceAsyncClient, + transports.AssetServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + (AssetServiceClient, transports.AssetServiceGrpcTransport, "grpc", "false"), + ( + AssetServiceAsyncClient, + transports.AssetServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + (AssetServiceClient, transports.AssetServiceRestTransport, "rest", "true"), + (AssetServiceClient, transports.AssetServiceRestTransport, "rest", "false"), + ], +) +@mock.patch.object( + AssetServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(AssetServiceClient), +) +@mock.patch.object( + AssetServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(AssetServiceAsyncClient), +) @mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_asset_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): +def test_asset_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. # Check the case client_cert_source is provided. Whether client cert is used depends on # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) if use_client_cert_env == "false": expected_client_cert_source = None - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) else: expected_client_cert_source = client_cert_source_callback expected_host = client.DEFAULT_MTLS_ENDPOINT @@ -570,12 +785,22 @@ def test_asset_service_client_mtls_env_auto(client_class, transport_class, trans # Check the case ADC client cert is provided. Whether client cert is used depends on # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): if use_client_cert_env == "false": - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) expected_client_cert_source = None else: expected_host = client.DEFAULT_MTLS_ENDPOINT @@ -596,15 +821,22 @@ def test_asset_service_client_mtls_env_auto(client_class, transport_class, trans ) # Check the case client_cert_source and ADC client cert are not provided. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): patched.return_value = None client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -614,19 +846,27 @@ def test_asset_service_client_mtls_env_auto(client_class, transport_class, trans ) -@pytest.mark.parametrize("client_class", [ - AssetServiceClient, AssetServiceAsyncClient -]) -@mock.patch.object(AssetServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(AssetServiceClient)) -@mock.patch.object(AssetServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(AssetServiceAsyncClient)) +@pytest.mark.parametrize("client_class", [AssetServiceClient, AssetServiceAsyncClient]) +@mock.patch.object( + AssetServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(AssetServiceClient) +) +@mock.patch.object( + AssetServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(AssetServiceAsyncClient), +) def test_asset_service_client_get_mtls_endpoint_and_cert_source(client_class): mock_client_cert_source = mock.Mock() # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
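The test above stubs two module-level functions in google.auth.transport.mtls so application-default credentials appear to supply a client certificate. A trimmed sketch of that nesting; the callback is hypothetical, and google-auth is assumed to be installed.

from unittest import mock

from google.auth.transport import mtls


def fake_cert_source():
    # Hypothetical callback returning PEM bytes, like client_cert_source_callback above.
    return b"cert bytes", b"key bytes"


def test_adc_client_cert_detected():
    with mock.patch(
        "google.auth.transport.mtls.has_default_client_cert_source",
        return_value=True,
    ):
        with mock.patch(
            "google.auth.transport.mtls.default_client_cert_source",
            return_value=fake_cert_source,
        ):
            assert mtls.has_default_client_cert_source() is True
            assert mtls.default_client_cert_source() is fake_cert_source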
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) assert api_endpoint == mock_api_endpoint assert cert_source == mock_client_cert_source @@ -634,18 +874,25 @@ def test_asset_service_client_get_mtls_endpoint_and_cert_source(client_class): with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): mock_client_cert_source = mock.Mock() mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) assert api_endpoint == mock_api_endpoint assert cert_source is None # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "Unsupported". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): if hasattr(google.auth.transport.mtls, "should_use_client_cert"): mock_client_cert_source = mock.Mock() mock_api_endpoint = "foo" options = client_options.ClientOptions( - client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + client_cert_source=mock_client_cert_source, + api_endpoint=mock_api_endpoint, ) api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( options @@ -682,23 +929,24 @@ def test_asset_service_client_get_mtls_endpoint_and_cert_source(client_class): env = os.environ.copy() env.pop("GOOGLE_API_USE_CLIENT_CERTIFICATE", None) with mock.patch.dict(os.environ, env, clear=True): - config_filename = "mock_certificate_config.json" - config_file_content = json.dumps(config_data) - m = mock.mock_open(read_data=config_file_content) - with mock.patch("builtins.open", m): - with mock.patch.dict( - os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": config_filename} - ): - mock_api_endpoint = "foo" - options = client_options.ClientOptions( - client_cert_source=mock_client_cert_source, - api_endpoint=mock_api_endpoint, - ) - api_endpoint, cert_source = ( - client_class.get_mtls_endpoint_and_cert_source(options) - ) - assert api_endpoint == mock_api_endpoint - assert cert_source is expected_cert_source + config_filename = "mock_certificate_config.json" + config_file_content = json.dumps(config_data) + m = mock.mock_open(read_data=config_file_content) + with mock.patch("builtins.open", m): + with mock.patch.dict( + os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": config_filename} + ): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, + api_endpoint=mock_api_endpoint, + ) + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is expected_cert_source # Test cases for mTLS enablement when GOOGLE_API_USE_CLIENT_CERTIFICATE 
is unset(empty). test_cases = [ @@ -729,23 +977,24 @@ def test_asset_service_client_get_mtls_endpoint_and_cert_source(client_class): env = os.environ.copy() env.pop("GOOGLE_API_USE_CLIENT_CERTIFICATE", "") with mock.patch.dict(os.environ, env, clear=True): - config_filename = "mock_certificate_config.json" - config_file_content = json.dumps(config_data) - m = mock.mock_open(read_data=config_file_content) - with mock.patch("builtins.open", m): - with mock.patch.dict( - os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": config_filename} - ): - mock_api_endpoint = "foo" - options = client_options.ClientOptions( - client_cert_source=mock_client_cert_source, - api_endpoint=mock_api_endpoint, - ) - api_endpoint, cert_source = ( - client_class.get_mtls_endpoint_and_cert_source(options) - ) - assert api_endpoint == mock_api_endpoint - assert cert_source is expected_cert_source + config_filename = "mock_certificate_config.json" + config_file_content = json.dumps(config_data) + m = mock.mock_open(read_data=config_file_content) + with mock.patch("builtins.open", m): + with mock.patch.dict( + os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": config_filename} + ): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, + api_endpoint=mock_api_endpoint, + ) + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is expected_cert_source # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): @@ -761,16 +1010,28 @@ def test_asset_service_client_get_mtls_endpoint_and_cert_source(client_class): # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() assert api_endpoint == client_class.DEFAULT_ENDPOINT assert cert_source is None # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT assert cert_source == mock_client_cert_source @@ -780,27 +1041,48 @@ def test_asset_service_client_get_mtls_endpoint_and_cert_source(client_class): with pytest.raises(MutualTLSChannelError) as excinfo: client_class.get_mtls_endpoint_and_cert_source() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + -@pytest.mark.parametrize("client_class", [ - AssetServiceClient, AssetServiceAsyncClient -]) -@mock.patch.object(AssetServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AssetServiceClient)) -@mock.patch.object(AssetServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AssetServiceAsyncClient)) +@pytest.mark.parametrize("client_class", [AssetServiceClient, AssetServiceAsyncClient]) +@mock.patch.object( + AssetServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(AssetServiceClient), +) +@mock.patch.object( + AssetServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(AssetServiceAsyncClient), +) def test_asset_service_client_client_api_endpoint(client_class): mock_client_cert_source = client_cert_source_callback api_override = "foo.com" default_universe = AssetServiceClient._DEFAULT_UNIVERSE - default_endpoint = AssetServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + default_endpoint = AssetServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) mock_universe = "bar.com" - mock_endpoint = AssetServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) + mock_endpoint = AssetServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", # use ClientOptions.api_endpoint as the api endpoint regardless. 
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) assert client.api_endpoint == api_override # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", @@ -823,11 +1105,19 @@ def test_asset_service_client_client_api_endpoint(client_class): universe_exists = hasattr(options, "universe_domain") if universe_exists: options = client_options.ClientOptions(universe_domain=mock_universe) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) else: - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) - assert client.universe_domain == (mock_universe if universe_exists else default_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. @@ -835,27 +1125,40 @@ def test_asset_service_client_client_api_endpoint(client_class): if hasattr(options, "universe_domain"): delattr(options, "universe_domain") with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) assert client.api_endpoint == default_endpoint -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (AssetServiceClient, transports.AssetServiceGrpcTransport, "grpc"), - (AssetServiceAsyncClient, transports.AssetServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (AssetServiceClient, transports.AssetServiceRestTransport, "rest"), -]) -def test_asset_service_client_client_options_scopes(client_class, transport_class, transport_name): +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (AssetServiceClient, transports.AssetServiceGrpcTransport, "grpc"), + ( + AssetServiceAsyncClient, + transports.AssetServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (AssetServiceClient, transports.AssetServiceRestTransport, "rest"), + ], +) +def test_asset_service_client_client_options_scopes( + client_class, transport_class, transport_name +): # Check the case scopes are provided. 
options = client_options.ClientOptions( scopes=["1", "2"], ) - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=["1", "2"], client_cert_source_for_mtls=None, quota_project_id=None, @@ -864,24 +1167,40 @@ def test_asset_service_client_client_options_scopes(client_class, transport_clas api_audience=None, ) -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (AssetServiceClient, transports.AssetServiceGrpcTransport, "grpc", grpc_helpers), - (AssetServiceAsyncClient, transports.AssetServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), - (AssetServiceClient, transports.AssetServiceRestTransport, "rest", None), -]) -def test_asset_service_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + AssetServiceClient, + transports.AssetServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + AssetServiceAsyncClient, + transports.AssetServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + (AssetServiceClient, transports.AssetServiceRestTransport, "rest", None), + ], +) +def test_asset_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): # Check the case credentials file is provided. 
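The scopes test above intercepts the transport constructor by patching __init__ and asserting on the keyword arguments it received; return_value must be set to None because __init__ may not return anything else. A minimal sketch with a hypothetical Transport class standing in for the generated transports.

from unittest import mock


class Transport:
    """Hypothetical stand-in for the generated transport classes."""

    def __init__(self, host=None, scopes=None):
        self.host = host
        self.scopes = scopes


def build_client(transport_cls, scopes=None):
    # Stand-in for the client constructor, which instantiates its transport.
    return transport_cls(host="cloudasset.googleapis.com", scopes=scopes)


def test_scopes_are_forwarded():
    with mock.patch.object(Transport, "__init__") as patched:
        patched.return_value = None  # __init__ must return None
        build_client(Transport, scopes=["1", "2"])
        # The mock replaces the unbound method, so only the kwargs are recorded.
        patched.assert_called_once_with(
            host="cloudasset.googleapis.com", scopes=["1", "2"]
        )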
- options = client_options.ClientOptions( - credentials_file="credentials.json" - ) + options = client_options.ClientOptions(credentials_file="credentials.json") - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -890,12 +1209,13 @@ def test_asset_service_client_client_options_credentials_file(client_class, tran api_audience=None, ) + def test_asset_service_client_client_options_from_dict(): - with mock.patch('google.cloud.asset_v1.services.asset_service.transports.AssetServiceGrpcTransport.__init__') as grpc_transport: + with mock.patch( + "google.cloud.asset_v1.services.asset_service.transports.AssetServiceGrpcTransport.__init__" + ) as grpc_transport: grpc_transport.return_value = None - client = AssetServiceClient( - client_options={'api_endpoint': 'squid.clam.whelk'} - ) + client = AssetServiceClient(client_options={"api_endpoint": "squid.clam.whelk"}) grpc_transport.assert_called_once_with( credentials=None, credentials_file=None, @@ -909,23 +1229,38 @@ def test_asset_service_client_client_options_from_dict(): ) -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (AssetServiceClient, transports.AssetServiceGrpcTransport, "grpc", grpc_helpers), - (AssetServiceAsyncClient, transports.AssetServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_asset_service_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + AssetServiceClient, + transports.AssetServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + AssetServiceAsyncClient, + transports.AssetServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_asset_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): # Check the case credentials file is provided. 
- options = client_options.ClientOptions( - credentials_file="credentials.json" - ) + options = client_options.ClientOptions(credentials_file="credentials.json") - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -952,9 +1287,7 @@ def test_asset_service_client_create_channel_credentials_file(client_class, tran credentials=file_creds, credentials_file=None, quota_project_id=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), scopes=None, default_host="cloudasset.googleapis.com", ssl_credentials=None, @@ -965,11 +1298,14 @@ def test_asset_service_client_create_channel_credentials_file(client_class, tran ) -@pytest.mark.parametrize("request_type", [ - asset_service.ExportAssetsRequest, - dict, -]) -def test_export_assets(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + asset_service.ExportAssetsRequest, + dict, + ], +) +def test_export_assets(request_type, transport: str = "grpc"): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -980,11 +1316,9 @@ def test_export_assets(request_type, transport: str = 'grpc'): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.export_assets), - '__call__') as call: + with mock.patch.object(type(client.transport.export_assets), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') + call.return_value = operations_pb2.Operation(name="operations/spam") response = client.export_assets(request) # Establish that the underlying gRPC stub method was called. @@ -1002,28 +1336,29 @@ def test_export_assets_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = asset_service.ExportAssetsRequest( - parent='parent_value', + parent="parent_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.export_assets), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + with mock.patch.object(type(client.transport.export_assets), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.export_assets(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == asset_service.ExportAssetsRequest( - parent='parent_value', + parent="parent_value", ) + def test_export_assets_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -1042,7 +1377,9 @@ def test_export_assets_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._transport._wrapped_methods[client._transport.export_assets] = mock_rpc request = {} client.export_assets(request) @@ -1061,8 +1398,11 @@ def test_export_assets_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_export_assets_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_export_assets_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -1076,12 +1416,17 @@ async def test_export_assets_async_use_cached_wrapped_rpc(transport: str = "grpc wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.export_assets in client._client._transport._wrapped_methods + assert ( + client._client._transport.export_assets + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.export_assets] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.export_assets + ] = mock_rpc request = {} await client.export_assets(request) @@ -1100,8 +1445,11 @@ async def test_export_assets_async_use_cached_wrapped_rpc(transport: str = "grpc assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_export_assets_async(transport: str = 'grpc_asyncio', request_type=asset_service.ExportAssetsRequest): +async def test_export_assets_async( + transport: str = "grpc_asyncio", request_type=asset_service.ExportAssetsRequest +): client = AssetServiceAsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -1112,12 +1460,10 @@ async def test_export_assets_async(transport: str = 'grpc_asyncio', request_type request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.export_assets), - '__call__') as call: + with mock.patch.object(type(client.transport.export_assets), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') + operations_pb2.Operation(name="operations/spam") ) response = await client.export_assets(request) @@ -1135,6 +1481,7 @@ async def test_export_assets_async(transport: str = 'grpc_asyncio', request_type async def test_export_assets_async_from_dict(): await test_export_assets_async(request_type=dict) + def test_export_assets_field_headers(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -1144,13 +1491,11 @@ def test_export_assets_field_headers(): # a field header. Set these to a non-empty value. request = asset_service.ExportAssetsRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.export_assets), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') + with mock.patch.object(type(client.transport.export_assets), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") client.export_assets(request) # Establish that the underlying gRPC stub method was called. @@ -1161,9 +1506,9 @@ def test_export_assets_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -1176,13 +1521,13 @@ async def test_export_assets_field_headers_async(): # a field header. Set these to a non-empty value. request = asset_service.ExportAssetsRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.export_assets), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + with mock.patch.object(type(client.transport.export_assets), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) await client.export_assets(request) # Establish that the underlying gRPC stub method was called. @@ -1193,16 +1538,19 @@ async def test_export_assets_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] -@pytest.mark.parametrize("request_type", [ - asset_service.ListAssetsRequest, - dict, -]) -def test_list_assets(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + asset_service.ListAssetsRequest, + dict, + ], +) +def test_list_assets(request_type, transport: str = "grpc"): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -1213,12 +1561,10 @@ def test_list_assets(request_type, transport: str = 'grpc'): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_assets), - '__call__') as call: + with mock.patch.object(type(client.transport.list_assets), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = asset_service.ListAssetsResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", ) response = client.list_assets(request) @@ -1230,7 +1576,7 @@ def test_list_assets(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListAssetsPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" def test_list_assets_non_empty_request_with_auto_populated_field(): @@ -1238,30 +1584,31 @@ def test_list_assets_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = asset_service.ListAssetsRequest( - parent='parent_value', - page_token='page_token_value', + parent="parent_value", + page_token="page_token_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_assets), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + with mock.patch.object(type(client.transport.list_assets), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.list_assets(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == asset_service.ListAssetsRequest( - parent='parent_value', - page_token='page_token_value', + parent="parent_value", + page_token="page_token_value", ) + def test_list_assets_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -1280,7 +1627,9 @@ def test_list_assets_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client._transport._wrapped_methods[client._transport.list_assets] = mock_rpc request = {} client.list_assets(request) @@ -1294,8 +1643,11 @@ def test_list_assets_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_list_assets_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_list_assets_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -1309,12 +1661,17 @@ async def test_list_assets_async_use_cached_wrapped_rpc(transport: str = "grpc_a wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.list_assets in client._client._transport._wrapped_methods + assert ( + client._client._transport.list_assets + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_assets] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.list_assets + ] = mock_rpc request = {} await client.list_assets(request) @@ -1328,8 +1685,11 @@ async def test_list_assets_async_use_cached_wrapped_rpc(transport: str = "grpc_a assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_list_assets_async(transport: str = 'grpc_asyncio', request_type=asset_service.ListAssetsRequest): +async def test_list_assets_async( + transport: str = "grpc_asyncio", request_type=asset_service.ListAssetsRequest +): client = AssetServiceAsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -1340,13 +1700,13 @@ async def test_list_assets_async(transport: str = 'grpc_asyncio', request_type=a request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_assets), - '__call__') as call: + with mock.patch.object(type(client.transport.list_assets), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(asset_service.ListAssetsResponse( - next_page_token='next_page_token_value', - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.ListAssetsResponse( + next_page_token="next_page_token_value", + ) + ) response = await client.list_assets(request) # Establish that the underlying gRPC stub method was called. @@ -1357,13 +1717,14 @@ async def test_list_assets_async(transport: str = 'grpc_asyncio', request_type=a # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListAssetsAsyncPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" @pytest.mark.asyncio async def test_list_assets_async_from_dict(): await test_list_assets_async(request_type=dict) + def test_list_assets_field_headers(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -1373,12 +1734,10 @@ def test_list_assets_field_headers(): # a field header. Set these to a non-empty value. 
request = asset_service.ListAssetsRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_assets), - '__call__') as call: + with mock.patch.object(type(client.transport.list_assets), "__call__") as call: call.return_value = asset_service.ListAssetsResponse() client.list_assets(request) @@ -1390,9 +1749,9 @@ def test_list_assets_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -1405,13 +1764,13 @@ async def test_list_assets_field_headers_async(): # a field header. Set these to a non-empty value. request = asset_service.ListAssetsRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_assets), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.ListAssetsResponse()) + with mock.patch.object(type(client.transport.list_assets), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.ListAssetsResponse() + ) await client.list_assets(request) # Establish that the underlying gRPC stub method was called. @@ -1422,9 +1781,9 @@ async def test_list_assets_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] def test_list_assets_flattened(): @@ -1433,15 +1792,13 @@ def test_list_assets_flattened(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_assets), - '__call__') as call: + with mock.patch.object(type(client.transport.list_assets), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = asset_service.ListAssetsResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.list_assets( - parent='parent_value', + parent="parent_value", ) # Establish that the underlying call was made with the expected @@ -1449,7 +1806,7 @@ def test_list_assets_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val @@ -1463,9 +1820,10 @@ def test_list_assets_flattened_error(): with pytest.raises(ValueError): client.list_assets( asset_service.ListAssetsRequest(), - parent='parent_value', + parent="parent_value", ) + @pytest.mark.asyncio async def test_list_assets_flattened_async(): client = AssetServiceAsyncClient( @@ -1473,17 +1831,17 @@ async def test_list_assets_flattened_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_assets), - '__call__') as call: + with mock.patch.object(type(client.transport.list_assets), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = asset_service.ListAssetsResponse() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.ListAssetsResponse()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.ListAssetsResponse() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.list_assets( - parent='parent_value', + parent="parent_value", ) # Establish that the underlying call was made with the expected @@ -1491,9 +1849,10 @@ async def test_list_assets_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val + @pytest.mark.asyncio async def test_list_assets_flattened_error_async(): client = AssetServiceAsyncClient( @@ -1505,7 +1864,7 @@ async def test_list_assets_flattened_error_async(): with pytest.raises(ValueError): await client.list_assets( asset_service.ListAssetsRequest(), - parent='parent_value', + parent="parent_value", ) @@ -1516,9 +1875,7 @@ def test_list_assets_pager(transport_name: str = "grpc"): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_assets), - '__call__') as call: + with mock.patch.object(type(client.transport.list_assets), "__call__") as call: # Set the response to a series of pages. call.side_effect = ( asset_service.ListAssetsResponse( @@ -1527,17 +1884,17 @@ def test_list_assets_pager(transport_name: str = "grpc"): assets.Asset(), assets.Asset(), ], - next_page_token='abc', + next_page_token="abc", ), asset_service.ListAssetsResponse( assets=[], - next_page_token='def', + next_page_token="def", ), asset_service.ListAssetsResponse( assets=[ assets.Asset(), ], - next_page_token='ghi', + next_page_token="ghi", ), asset_service.ListAssetsResponse( assets=[ @@ -1552,9 +1909,7 @@ def test_list_assets_pager(transport_name: str = "grpc"): retry = retries.Retry() timeout = 5 expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_assets(request={}, retry=retry, timeout=timeout) @@ -1564,8 +1919,9 @@ def test_list_assets_pager(transport_name: str = "grpc"): results = list(pager) assert len(results) == 6 - assert all(isinstance(i, assets.Asset) - for i in results) + assert all(isinstance(i, assets.Asset) for i in results) + + def test_list_assets_pages(transport_name: str = "grpc"): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -1573,9 +1929,7 @@ def test_list_assets_pages(transport_name: str = "grpc"): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_assets), - '__call__') as call: + with mock.patch.object(type(client.transport.list_assets), "__call__") as call: # Set the response to a series of pages. 
call.side_effect = ( asset_service.ListAssetsResponse( @@ -1584,17 +1938,17 @@ def test_list_assets_pages(transport_name: str = "grpc"): assets.Asset(), assets.Asset(), ], - next_page_token='abc', + next_page_token="abc", ), asset_service.ListAssetsResponse( assets=[], - next_page_token='def', + next_page_token="def", ), asset_service.ListAssetsResponse( assets=[ assets.Asset(), ], - next_page_token='ghi', + next_page_token="ghi", ), asset_service.ListAssetsResponse( assets=[ @@ -1605,9 +1959,10 @@ def test_list_assets_pages(transport_name: str = "grpc"): RuntimeError, ) pages = list(client.list_assets(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token + @pytest.mark.asyncio async def test_list_assets_async_pager(): client = AssetServiceAsyncClient( @@ -1616,8 +1971,8 @@ async def test_list_assets_async_pager(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_assets), - '__call__', new_callable=mock.AsyncMock) as call: + type(client.transport.list_assets), "__call__", new_callable=mock.AsyncMock + ) as call: # Set the response to a series of pages. call.side_effect = ( asset_service.ListAssetsResponse( @@ -1626,17 +1981,17 @@ async def test_list_assets_async_pager(): assets.Asset(), assets.Asset(), ], - next_page_token='abc', + next_page_token="abc", ), asset_service.ListAssetsResponse( assets=[], - next_page_token='def', + next_page_token="def", ), asset_service.ListAssetsResponse( assets=[ assets.Asset(), ], - next_page_token='ghi', + next_page_token="ghi", ), asset_service.ListAssetsResponse( assets=[ @@ -1646,15 +2001,16 @@ async def test_list_assets_async_pager(): ), RuntimeError, ) - async_pager = await client.list_assets(request={},) - assert async_pager.next_page_token == 'abc' + async_pager = await client.list_assets( + request={}, + ) + assert async_pager.next_page_token == "abc" responses = [] - async for response in async_pager: # pragma: no branch + async for response in async_pager: # pragma: no branch responses.append(response) assert len(responses) == 6 - assert all(isinstance(i, assets.Asset) - for i in responses) + assert all(isinstance(i, assets.Asset) for i in responses) @pytest.mark.asyncio @@ -1665,8 +2021,8 @@ async def test_list_assets_async_pages(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_assets), - '__call__', new_callable=mock.AsyncMock) as call: + type(client.transport.list_assets), "__call__", new_callable=mock.AsyncMock + ) as call: # Set the response to a series of pages. 
call.side_effect = ( asset_service.ListAssetsResponse( @@ -1675,17 +2031,17 @@ async def test_list_assets_async_pages(): assets.Asset(), assets.Asset(), ], - next_page_token='abc', + next_page_token="abc", ), asset_service.ListAssetsResponse( assets=[], - next_page_token='def', + next_page_token="def", ), asset_service.ListAssetsResponse( assets=[ assets.Asset(), ], - next_page_token='ghi', + next_page_token="ghi", ), asset_service.ListAssetsResponse( assets=[ @@ -1698,18 +2054,22 @@ async def test_list_assets_async_pages(): pages = [] # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch + async for page_ in ( # pragma: no branch await client.list_assets(request={}) ).pages: pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize("request_type", [ - asset_service.BatchGetAssetsHistoryRequest, - dict, -]) -def test_batch_get_assets_history(request_type, transport: str = 'grpc'): + +@pytest.mark.parametrize( + "request_type", + [ + asset_service.BatchGetAssetsHistoryRequest, + dict, + ], +) +def test_batch_get_assets_history(request_type, transport: str = "grpc"): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -1721,11 +2081,10 @@ def test_batch_get_assets_history(request_type, transport: str = 'grpc'): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.batch_get_assets_history), - '__call__') as call: + type(client.transport.batch_get_assets_history), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = asset_service.BatchGetAssetsHistoryResponse( - ) + call.return_value = asset_service.BatchGetAssetsHistoryResponse() response = client.batch_get_assets_history(request) # Establish that the underlying gRPC stub method was called. @@ -1743,28 +2102,31 @@ def test_batch_get_assets_history_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = asset_service.BatchGetAssetsHistoryRequest( - parent='parent_value', + parent="parent_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.batch_get_assets_history), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + type(client.transport.batch_get_assets_history), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.batch_get_assets_history(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == asset_service.BatchGetAssetsHistoryRequest( - parent='parent_value', + parent="parent_value", ) + def test_batch_get_assets_history_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -1779,12 +2141,19 @@ def test_batch_get_assets_history_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.batch_get_assets_history in client._transport._wrapped_methods + assert ( + client._transport.batch_get_assets_history + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.batch_get_assets_history] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.batch_get_assets_history + ] = mock_rpc request = {} client.batch_get_assets_history(request) @@ -1797,8 +2166,11 @@ def test_batch_get_assets_history_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_batch_get_assets_history_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_batch_get_assets_history_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -1812,12 +2184,17 @@ async def test_batch_get_assets_history_async_use_cached_wrapped_rpc(transport: wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.batch_get_assets_history in client._client._transport._wrapped_methods + assert ( + client._client._transport.batch_get_assets_history + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.batch_get_assets_history] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.batch_get_assets_history + ] = mock_rpc request = {} await client.batch_get_assets_history(request) @@ -1831,8 +2208,12 @@ async def test_batch_get_assets_history_async_use_cached_wrapped_rpc(transport: assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_batch_get_assets_history_async(transport: str = 'grpc_asyncio', request_type=asset_service.BatchGetAssetsHistoryRequest): +async def test_batch_get_assets_history_async( + transport: str = "grpc_asyncio", + request_type=asset_service.BatchGetAssetsHistoryRequest, +): client = AssetServiceAsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -1844,11 +2225,12 @@ async def test_batch_get_assets_history_async(transport: str = 'grpc_asyncio', r # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.batch_get_assets_history), - '__call__') as call: + type(client.transport.batch_get_assets_history), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(asset_service.BatchGetAssetsHistoryResponse( - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.BatchGetAssetsHistoryResponse() + ) response = await client.batch_get_assets_history(request) # Establish that the underlying gRPC stub method was called. @@ -1865,6 +2247,7 @@ async def test_batch_get_assets_history_async(transport: str = 'grpc_asyncio', r async def test_batch_get_assets_history_async_from_dict(): await test_batch_get_assets_history_async(request_type=dict) + def test_batch_get_assets_history_field_headers(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -1874,12 +2257,12 @@ def test_batch_get_assets_history_field_headers(): # a field header. Set these to a non-empty value. request = asset_service.BatchGetAssetsHistoryRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.batch_get_assets_history), - '__call__') as call: + type(client.transport.batch_get_assets_history), "__call__" + ) as call: call.return_value = asset_service.BatchGetAssetsHistoryResponse() client.batch_get_assets_history(request) @@ -1891,9 +2274,9 @@ def test_batch_get_assets_history_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -1906,13 +2289,15 @@ async def test_batch_get_assets_history_field_headers_async(): # a field header. Set these to a non-empty value. request = asset_service.BatchGetAssetsHistoryRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.batch_get_assets_history), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.BatchGetAssetsHistoryResponse()) + type(client.transport.batch_get_assets_history), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.BatchGetAssetsHistoryResponse() + ) await client.batch_get_assets_history(request) # Establish that the underlying gRPC stub method was called. @@ -1923,16 +2308,19 @@ async def test_batch_get_assets_history_field_headers_async(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] -@pytest.mark.parametrize("request_type", [ - asset_service.CreateFeedRequest, - dict, -]) -def test_create_feed(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + asset_service.CreateFeedRequest, + dict, + ], +) +def test_create_feed(request_type, transport: str = "grpc"): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -1943,16 +2331,14 @@ def test_create_feed(request_type, transport: str = 'grpc'): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_feed), - '__call__') as call: + with mock.patch.object(type(client.transport.create_feed), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = asset_service.Feed( - name='name_value', - asset_names=['asset_names_value'], - asset_types=['asset_types_value'], + name="name_value", + asset_names=["asset_names_value"], + asset_types=["asset_types_value"], content_type=asset_service.ContentType.RESOURCE, - relationship_types=['relationship_types_value'], + relationship_types=["relationship_types_value"], ) response = client.create_feed(request) @@ -1964,11 +2350,11 @@ def test_create_feed(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, asset_service.Feed) - assert response.name == 'name_value' - assert response.asset_names == ['asset_names_value'] - assert response.asset_types == ['asset_types_value'] + assert response.name == "name_value" + assert response.asset_names == ["asset_names_value"] + assert response.asset_types == ["asset_types_value"] assert response.content_type == asset_service.ContentType.RESOURCE - assert response.relationship_types == ['relationship_types_value'] + assert response.relationship_types == ["relationship_types_value"] def test_create_feed_non_empty_request_with_auto_populated_field(): @@ -1976,30 +2362,31 @@ def test_create_feed_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = asset_service.CreateFeedRequest( - parent='parent_value', - feed_id='feed_id_value', + parent="parent_value", + feed_id="feed_id_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_feed), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + with mock.patch.object(type(client.transport.create_feed), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.create_feed(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == asset_service.CreateFeedRequest( - parent='parent_value', - feed_id='feed_id_value', + parent="parent_value", + feed_id="feed_id_value", ) + def test_create_feed_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -2018,7 +2405,9 @@ def test_create_feed_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._transport._wrapped_methods[client._transport.create_feed] = mock_rpc request = {} client.create_feed(request) @@ -2032,8 +2421,11 @@ def test_create_feed_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_create_feed_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_create_feed_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -2047,12 +2439,17 @@ async def test_create_feed_async_use_cached_wrapped_rpc(transport: str = "grpc_a wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.create_feed in client._client._transport._wrapped_methods + assert ( + client._client._transport.create_feed + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.create_feed] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.create_feed + ] = mock_rpc request = {} await client.create_feed(request) @@ -2066,8 +2463,11 @@ async def test_create_feed_async_use_cached_wrapped_rpc(transport: str = "grpc_a assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_create_feed_async(transport: str = 'grpc_asyncio', request_type=asset_service.CreateFeedRequest): +async def test_create_feed_async( + transport: str = "grpc_asyncio", request_type=asset_service.CreateFeedRequest +): client = AssetServiceAsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -2078,17 +2478,17 @@ async def test_create_feed_async(transport: str = 'grpc_asyncio', request_type=a request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_feed), - '__call__') as call: + with mock.patch.object(type(client.transport.create_feed), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(asset_service.Feed( - name='name_value', - asset_names=['asset_names_value'], - asset_types=['asset_types_value'], - content_type=asset_service.ContentType.RESOURCE, - relationship_types=['relationship_types_value'], - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.Feed( + name="name_value", + asset_names=["asset_names_value"], + asset_types=["asset_types_value"], + content_type=asset_service.ContentType.RESOURCE, + relationship_types=["relationship_types_value"], + ) + ) response = await client.create_feed(request) # Establish that the underlying gRPC stub method was called. @@ -2099,17 +2499,18 @@ async def test_create_feed_async(transport: str = 'grpc_asyncio', request_type=a # Establish that the response is the type that we expect. assert isinstance(response, asset_service.Feed) - assert response.name == 'name_value' - assert response.asset_names == ['asset_names_value'] - assert response.asset_types == ['asset_types_value'] + assert response.name == "name_value" + assert response.asset_names == ["asset_names_value"] + assert response.asset_types == ["asset_types_value"] assert response.content_type == asset_service.ContentType.RESOURCE - assert response.relationship_types == ['relationship_types_value'] + assert response.relationship_types == ["relationship_types_value"] @pytest.mark.asyncio async def test_create_feed_async_from_dict(): await test_create_feed_async(request_type=dict) + def test_create_feed_field_headers(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -2119,12 +2520,10 @@ def test_create_feed_field_headers(): # a field header. Set these to a non-empty value. request = asset_service.CreateFeedRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_feed), - '__call__') as call: + with mock.patch.object(type(client.transport.create_feed), "__call__") as call: call.return_value = asset_service.Feed() client.create_feed(request) @@ -2136,9 +2535,9 @@ def test_create_feed_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -2151,12 +2550,10 @@ async def test_create_feed_field_headers_async(): # a field header. Set these to a non-empty value. request = asset_service.CreateFeedRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_feed), - '__call__') as call: + with mock.patch.object(type(client.transport.create_feed), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.Feed()) await client.create_feed(request) @@ -2168,9 +2565,9 @@ async def test_create_feed_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] def test_create_feed_flattened(): @@ -2179,15 +2576,13 @@ def test_create_feed_flattened(): ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_feed), - '__call__') as call: + with mock.patch.object(type(client.transport.create_feed), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = asset_service.Feed() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.create_feed( - parent='parent_value', + parent="parent_value", ) # Establish that the underlying call was made with the expected @@ -2195,7 +2590,7 @@ def test_create_feed_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val @@ -2209,9 +2604,10 @@ def test_create_feed_flattened_error(): with pytest.raises(ValueError): client.create_feed( asset_service.CreateFeedRequest(), - parent='parent_value', + parent="parent_value", ) + @pytest.mark.asyncio async def test_create_feed_flattened_async(): client = AssetServiceAsyncClient( @@ -2219,9 +2615,7 @@ async def test_create_feed_flattened_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_feed), - '__call__') as call: + with mock.patch.object(type(client.transport.create_feed), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = asset_service.Feed() @@ -2229,7 +2623,7 @@ async def test_create_feed_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.create_feed( - parent='parent_value', + parent="parent_value", ) # Establish that the underlying call was made with the expected @@ -2237,9 +2631,10 @@ async def test_create_feed_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val + @pytest.mark.asyncio async def test_create_feed_flattened_error_async(): client = AssetServiceAsyncClient( @@ -2251,15 +2646,18 @@ async def test_create_feed_flattened_error_async(): with pytest.raises(ValueError): await client.create_feed( asset_service.CreateFeedRequest(), - parent='parent_value', + parent="parent_value", ) -@pytest.mark.parametrize("request_type", [ - asset_service.GetFeedRequest, - dict, -]) -def test_get_feed(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + asset_service.GetFeedRequest, + dict, + ], +) +def test_get_feed(request_type, transport: str = "grpc"): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -2270,16 +2668,14 @@ def test_get_feed(request_type, transport: str = 'grpc'): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_feed), - '__call__') as call: + with mock.patch.object(type(client.transport.get_feed), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = asset_service.Feed( - name='name_value', - asset_names=['asset_names_value'], - asset_types=['asset_types_value'], + name="name_value", + asset_names=["asset_names_value"], + asset_types=["asset_types_value"], content_type=asset_service.ContentType.RESOURCE, - relationship_types=['relationship_types_value'], + relationship_types=["relationship_types_value"], ) response = client.get_feed(request) @@ -2291,11 +2687,11 @@ def test_get_feed(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, asset_service.Feed) - assert response.name == 'name_value' - assert response.asset_names == ['asset_names_value'] - assert response.asset_types == ['asset_types_value'] + assert response.name == "name_value" + assert response.asset_names == ["asset_names_value"] + assert response.asset_types == ["asset_types_value"] assert response.content_type == asset_service.ContentType.RESOURCE - assert response.relationship_types == ['relationship_types_value'] + assert response.relationship_types == ["relationship_types_value"] def test_get_feed_non_empty_request_with_auto_populated_field(): @@ -2303,28 +2699,29 @@ def test_get_feed_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = asset_service.GetFeedRequest( - name='name_value', + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_feed), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + with mock.patch.object(type(client.transport.get_feed), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.get_feed(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == asset_service.GetFeedRequest( - name='name_value', + name="name_value", ) + def test_get_feed_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -2343,7 +2740,9 @@ def test_get_feed_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client._transport._wrapped_methods[client._transport.get_feed] = mock_rpc request = {} client.get_feed(request) @@ -2357,6 +2756,7 @@ def test_get_feed_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio async def test_get_feed_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -2372,12 +2772,17 @@ async def test_get_feed_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.get_feed in client._client._transport._wrapped_methods + assert ( + client._client._transport.get_feed + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.get_feed] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.get_feed + ] = mock_rpc request = {} await client.get_feed(request) @@ -2391,8 +2796,11 @@ async def test_get_feed_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_get_feed_async(transport: str = 'grpc_asyncio', request_type=asset_service.GetFeedRequest): +async def test_get_feed_async( + transport: str = "grpc_asyncio", request_type=asset_service.GetFeedRequest +): client = AssetServiceAsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -2403,17 +2811,17 @@ async def test_get_feed_async(transport: str = 'grpc_asyncio', request_type=asse request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_feed), - '__call__') as call: + with mock.patch.object(type(client.transport.get_feed), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(asset_service.Feed( - name='name_value', - asset_names=['asset_names_value'], - asset_types=['asset_types_value'], - content_type=asset_service.ContentType.RESOURCE, - relationship_types=['relationship_types_value'], - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.Feed( + name="name_value", + asset_names=["asset_names_value"], + asset_types=["asset_types_value"], + content_type=asset_service.ContentType.RESOURCE, + relationship_types=["relationship_types_value"], + ) + ) response = await client.get_feed(request) # Establish that the underlying gRPC stub method was called. @@ -2424,17 +2832,18 @@ async def test_get_feed_async(transport: str = 'grpc_asyncio', request_type=asse # Establish that the response is the type that we expect. 
assert isinstance(response, asset_service.Feed) - assert response.name == 'name_value' - assert response.asset_names == ['asset_names_value'] - assert response.asset_types == ['asset_types_value'] + assert response.name == "name_value" + assert response.asset_names == ["asset_names_value"] + assert response.asset_types == ["asset_types_value"] assert response.content_type == asset_service.ContentType.RESOURCE - assert response.relationship_types == ['relationship_types_value'] + assert response.relationship_types == ["relationship_types_value"] @pytest.mark.asyncio async def test_get_feed_async_from_dict(): await test_get_feed_async(request_type=dict) + def test_get_feed_field_headers(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -2444,12 +2853,10 @@ def test_get_feed_field_headers(): # a field header. Set these to a non-empty value. request = asset_service.GetFeedRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_feed), - '__call__') as call: + with mock.patch.object(type(client.transport.get_feed), "__call__") as call: call.return_value = asset_service.Feed() client.get_feed(request) @@ -2461,9 +2868,9 @@ def test_get_feed_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -2476,12 +2883,10 @@ async def test_get_feed_field_headers_async(): # a field header. Set these to a non-empty value. request = asset_service.GetFeedRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_feed), - '__call__') as call: + with mock.patch.object(type(client.transport.get_feed), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.Feed()) await client.get_feed(request) @@ -2493,9 +2898,9 @@ async def test_get_feed_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] def test_get_feed_flattened(): @@ -2504,15 +2909,13 @@ def test_get_feed_flattened(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_feed), - '__call__') as call: + with mock.patch.object(type(client.transport.get_feed), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = asset_service.Feed() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.get_feed( - name='name_value', + name="name_value", ) # Establish that the underlying call was made with the expected @@ -2520,7 +2923,7 @@ def test_get_feed_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val @@ -2534,9 +2937,10 @@ def test_get_feed_flattened_error(): with pytest.raises(ValueError): client.get_feed( asset_service.GetFeedRequest(), - name='name_value', + name="name_value", ) + @pytest.mark.asyncio async def test_get_feed_flattened_async(): client = AssetServiceAsyncClient( @@ -2544,9 +2948,7 @@ async def test_get_feed_flattened_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_feed), - '__call__') as call: + with mock.patch.object(type(client.transport.get_feed), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = asset_service.Feed() @@ -2554,7 +2956,7 @@ async def test_get_feed_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.get_feed( - name='name_value', + name="name_value", ) # Establish that the underlying call was made with the expected @@ -2562,9 +2964,10 @@ async def test_get_feed_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val + @pytest.mark.asyncio async def test_get_feed_flattened_error_async(): client = AssetServiceAsyncClient( @@ -2576,15 +2979,18 @@ async def test_get_feed_flattened_error_async(): with pytest.raises(ValueError): await client.get_feed( asset_service.GetFeedRequest(), - name='name_value', + name="name_value", ) -@pytest.mark.parametrize("request_type", [ - asset_service.ListFeedsRequest, - dict, -]) -def test_list_feeds(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + asset_service.ListFeedsRequest, + dict, + ], +) +def test_list_feeds(request_type, transport: str = "grpc"): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -2595,12 +3001,9 @@ def test_list_feeds(request_type, transport: str = 'grpc'): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_feeds), - '__call__') as call: + with mock.patch.object(type(client.transport.list_feeds), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = asset_service.ListFeedsResponse( - ) + call.return_value = asset_service.ListFeedsResponse() response = client.list_feeds(request) # Establish that the underlying gRPC stub method was called. @@ -2618,28 +3021,29 @@ def test_list_feeds_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = asset_service.ListFeedsRequest( - parent='parent_value', + parent="parent_value", ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_feeds), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + with mock.patch.object(type(client.transport.list_feeds), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.list_feeds(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == asset_service.ListFeedsRequest( - parent='parent_value', + parent="parent_value", ) + def test_list_feeds_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -2658,7 +3062,9 @@ def test_list_feeds_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._transport._wrapped_methods[client._transport.list_feeds] = mock_rpc request = {} client.list_feeds(request) @@ -2672,6 +3078,7 @@ def test_list_feeds_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio async def test_list_feeds_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -2687,12 +3094,17 @@ async def test_list_feeds_async_use_cached_wrapped_rpc(transport: str = "grpc_as wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.list_feeds in client._client._transport._wrapped_methods + assert ( + client._client._transport.list_feeds + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_feeds] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.list_feeds + ] = mock_rpc request = {} await client.list_feeds(request) @@ -2706,8 +3118,11 @@ async def test_list_feeds_async_use_cached_wrapped_rpc(transport: str = "grpc_as assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_list_feeds_async(transport: str = 'grpc_asyncio', request_type=asset_service.ListFeedsRequest): +async def test_list_feeds_async( + transport: str = "grpc_asyncio", request_type=asset_service.ListFeedsRequest +): client = AssetServiceAsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -2718,12 +3133,11 @@ async def test_list_feeds_async(transport: str = 'grpc_asyncio', request_type=as request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_feeds), - '__call__') as call: + with mock.patch.object(type(client.transport.list_feeds), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(asset_service.ListFeedsResponse( - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.ListFeedsResponse() + ) response = await client.list_feeds(request) # Establish that the underlying gRPC stub method was called. 
@@ -2740,6 +3154,7 @@ async def test_list_feeds_async(transport: str = 'grpc_asyncio', request_type=as async def test_list_feeds_async_from_dict(): await test_list_feeds_async(request_type=dict) + def test_list_feeds_field_headers(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -2749,12 +3164,10 @@ def test_list_feeds_field_headers(): # a field header. Set these to a non-empty value. request = asset_service.ListFeedsRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_feeds), - '__call__') as call: + with mock.patch.object(type(client.transport.list_feeds), "__call__") as call: call.return_value = asset_service.ListFeedsResponse() client.list_feeds(request) @@ -2766,9 +3179,9 @@ def test_list_feeds_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -2781,13 +3194,13 @@ async def test_list_feeds_field_headers_async(): # a field header. Set these to a non-empty value. request = asset_service.ListFeedsRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_feeds), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.ListFeedsResponse()) + with mock.patch.object(type(client.transport.list_feeds), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.ListFeedsResponse() + ) await client.list_feeds(request) # Establish that the underlying gRPC stub method was called. @@ -2798,9 +3211,9 @@ async def test_list_feeds_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] def test_list_feeds_flattened(): @@ -2809,15 +3222,13 @@ def test_list_feeds_flattened(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_feeds), - '__call__') as call: + with mock.patch.object(type(client.transport.list_feeds), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = asset_service.ListFeedsResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.list_feeds( - parent='parent_value', + parent="parent_value", ) # Establish that the underlying call was made with the expected @@ -2825,7 +3236,7 @@ def test_list_feeds_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val @@ -2839,9 +3250,10 @@ def test_list_feeds_flattened_error(): with pytest.raises(ValueError): client.list_feeds( asset_service.ListFeedsRequest(), - parent='parent_value', + parent="parent_value", ) + @pytest.mark.asyncio async def test_list_feeds_flattened_async(): client = AssetServiceAsyncClient( @@ -2849,17 +3261,17 @@ async def test_list_feeds_flattened_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_feeds), - '__call__') as call: + with mock.patch.object(type(client.transport.list_feeds), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = asset_service.ListFeedsResponse() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.ListFeedsResponse()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.ListFeedsResponse() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.list_feeds( - parent='parent_value', + parent="parent_value", ) # Establish that the underlying call was made with the expected @@ -2867,9 +3279,10 @@ async def test_list_feeds_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val + @pytest.mark.asyncio async def test_list_feeds_flattened_error_async(): client = AssetServiceAsyncClient( @@ -2881,15 +3294,18 @@ async def test_list_feeds_flattened_error_async(): with pytest.raises(ValueError): await client.list_feeds( asset_service.ListFeedsRequest(), - parent='parent_value', + parent="parent_value", ) -@pytest.mark.parametrize("request_type", [ - asset_service.UpdateFeedRequest, - dict, -]) -def test_update_feed(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + asset_service.UpdateFeedRequest, + dict, + ], +) +def test_update_feed(request_type, transport: str = "grpc"): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -2900,16 +3316,14 @@ def test_update_feed(request_type, transport: str = 'grpc'): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_feed), - '__call__') as call: + with mock.patch.object(type(client.transport.update_feed), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = asset_service.Feed( - name='name_value', - asset_names=['asset_names_value'], - asset_types=['asset_types_value'], + name="name_value", + asset_names=["asset_names_value"], + asset_types=["asset_types_value"], content_type=asset_service.ContentType.RESOURCE, - relationship_types=['relationship_types_value'], + relationship_types=["relationship_types_value"], ) response = client.update_feed(request) @@ -2921,11 +3335,11 @@ def test_update_feed(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. 
assert isinstance(response, asset_service.Feed) - assert response.name == 'name_value' - assert response.asset_names == ['asset_names_value'] - assert response.asset_types == ['asset_types_value'] + assert response.name == "name_value" + assert response.asset_names == ["asset_names_value"] + assert response.asset_types == ["asset_types_value"] assert response.content_type == asset_service.ContentType.RESOURCE - assert response.relationship_types == ['relationship_types_value'] + assert response.relationship_types == ["relationship_types_value"] def test_update_feed_non_empty_request_with_auto_populated_field(): @@ -2933,25 +3347,24 @@ def test_update_feed_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = asset_service.UpdateFeedRequest( - ) + request = asset_service.UpdateFeedRequest() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_feed), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + with mock.patch.object(type(client.transport.update_feed), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.update_feed(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.UpdateFeedRequest( - ) + assert args[0] == asset_service.UpdateFeedRequest() + def test_update_feed_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -2971,7 +3384,9 @@ def test_update_feed_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client._transport._wrapped_methods[client._transport.update_feed] = mock_rpc request = {} client.update_feed(request) @@ -2985,8 +3400,11 @@ def test_update_feed_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_update_feed_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_update_feed_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -3000,12 +3418,17 @@ async def test_update_feed_async_use_cached_wrapped_rpc(transport: str = "grpc_a wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.update_feed in client._client._transport._wrapped_methods + assert ( + client._client._transport.update_feed + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.update_feed] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.update_feed + ] = mock_rpc request = {} await client.update_feed(request) @@ -3019,8 +3442,11 @@ async def test_update_feed_async_use_cached_wrapped_rpc(transport: str = "grpc_a assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_update_feed_async(transport: str = 'grpc_asyncio', request_type=asset_service.UpdateFeedRequest): +async def test_update_feed_async( + transport: str = "grpc_asyncio", request_type=asset_service.UpdateFeedRequest +): client = AssetServiceAsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -3031,17 +3457,17 @@ async def test_update_feed_async(transport: str = 'grpc_asyncio', request_type=a request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_feed), - '__call__') as call: + with mock.patch.object(type(client.transport.update_feed), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(asset_service.Feed( - name='name_value', - asset_names=['asset_names_value'], - asset_types=['asset_types_value'], - content_type=asset_service.ContentType.RESOURCE, - relationship_types=['relationship_types_value'], - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.Feed( + name="name_value", + asset_names=["asset_names_value"], + asset_types=["asset_types_value"], + content_type=asset_service.ContentType.RESOURCE, + relationship_types=["relationship_types_value"], + ) + ) response = await client.update_feed(request) # Establish that the underlying gRPC stub method was called. @@ -3052,17 +3478,18 @@ async def test_update_feed_async(transport: str = 'grpc_asyncio', request_type=a # Establish that the response is the type that we expect. 
assert isinstance(response, asset_service.Feed) - assert response.name == 'name_value' - assert response.asset_names == ['asset_names_value'] - assert response.asset_types == ['asset_types_value'] + assert response.name == "name_value" + assert response.asset_names == ["asset_names_value"] + assert response.asset_types == ["asset_types_value"] assert response.content_type == asset_service.ContentType.RESOURCE - assert response.relationship_types == ['relationship_types_value'] + assert response.relationship_types == ["relationship_types_value"] @pytest.mark.asyncio async def test_update_feed_async_from_dict(): await test_update_feed_async(request_type=dict) + def test_update_feed_field_headers(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3072,12 +3499,10 @@ def test_update_feed_field_headers(): # a field header. Set these to a non-empty value. request = asset_service.UpdateFeedRequest() - request.feed.name = 'name_value' + request.feed.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_feed), - '__call__') as call: + with mock.patch.object(type(client.transport.update_feed), "__call__") as call: call.return_value = asset_service.Feed() client.update_feed(request) @@ -3089,9 +3514,9 @@ def test_update_feed_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'feed.name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "feed.name=name_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -3104,12 +3529,10 @@ async def test_update_feed_field_headers_async(): # a field header. Set these to a non-empty value. request = asset_service.UpdateFeedRequest() - request.feed.name = 'name_value' + request.feed.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_feed), - '__call__') as call: + with mock.patch.object(type(client.transport.update_feed), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.Feed()) await client.update_feed(request) @@ -3121,9 +3544,9 @@ async def test_update_feed_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'feed.name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "feed.name=name_value", + ) in kw["metadata"] def test_update_feed_flattened(): @@ -3132,15 +3555,13 @@ def test_update_feed_flattened(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_feed), - '__call__') as call: + with mock.patch.object(type(client.transport.update_feed), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = asset_service.Feed() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.update_feed( - feed=asset_service.Feed(name='name_value'), + feed=asset_service.Feed(name="name_value"), ) # Establish that the underlying call was made with the expected @@ -3148,7 +3569,7 @@ def test_update_feed_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].feed - mock_val = asset_service.Feed(name='name_value') + mock_val = asset_service.Feed(name="name_value") assert arg == mock_val @@ -3162,9 +3583,10 @@ def test_update_feed_flattened_error(): with pytest.raises(ValueError): client.update_feed( asset_service.UpdateFeedRequest(), - feed=asset_service.Feed(name='name_value'), + feed=asset_service.Feed(name="name_value"), ) + @pytest.mark.asyncio async def test_update_feed_flattened_async(): client = AssetServiceAsyncClient( @@ -3172,9 +3594,7 @@ async def test_update_feed_flattened_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_feed), - '__call__') as call: + with mock.patch.object(type(client.transport.update_feed), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = asset_service.Feed() @@ -3182,7 +3602,7 @@ async def test_update_feed_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.update_feed( - feed=asset_service.Feed(name='name_value'), + feed=asset_service.Feed(name="name_value"), ) # Establish that the underlying call was made with the expected @@ -3190,9 +3610,10 @@ async def test_update_feed_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].feed - mock_val = asset_service.Feed(name='name_value') + mock_val = asset_service.Feed(name="name_value") assert arg == mock_val + @pytest.mark.asyncio async def test_update_feed_flattened_error_async(): client = AssetServiceAsyncClient( @@ -3204,15 +3625,18 @@ async def test_update_feed_flattened_error_async(): with pytest.raises(ValueError): await client.update_feed( asset_service.UpdateFeedRequest(), - feed=asset_service.Feed(name='name_value'), + feed=asset_service.Feed(name="name_value"), ) -@pytest.mark.parametrize("request_type", [ - asset_service.DeleteFeedRequest, - dict, -]) -def test_delete_feed(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + asset_service.DeleteFeedRequest, + dict, + ], +) +def test_delete_feed(request_type, transport: str = "grpc"): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -3223,9 +3647,7 @@ def test_delete_feed(request_type, transport: str = 'grpc'): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_feed), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_feed), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None response = client.delete_feed(request) @@ -3245,28 +3667,29 @@ def test_delete_feed_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. 
request = asset_service.DeleteFeedRequest( - name='name_value', + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_feed), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + with mock.patch.object(type(client.transport.delete_feed), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.delete_feed(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == asset_service.DeleteFeedRequest( - name='name_value', + name="name_value", ) + def test_delete_feed_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -3285,7 +3708,9 @@ def test_delete_feed_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._transport._wrapped_methods[client._transport.delete_feed] = mock_rpc request = {} client.delete_feed(request) @@ -3299,8 +3724,11 @@ def test_delete_feed_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_delete_feed_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_delete_feed_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -3314,12 +3742,17 @@ async def test_delete_feed_async_use_cached_wrapped_rpc(transport: str = "grpc_a wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.delete_feed in client._client._transport._wrapped_methods + assert ( + client._client._transport.delete_feed + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.delete_feed] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.delete_feed + ] = mock_rpc request = {} await client.delete_feed(request) @@ -3333,8 +3766,11 @@ async def test_delete_feed_async_use_cached_wrapped_rpc(transport: str = "grpc_a assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_delete_feed_async(transport: str = 'grpc_asyncio', request_type=asset_service.DeleteFeedRequest): +async def test_delete_feed_async( + transport: str = "grpc_asyncio", request_type=asset_service.DeleteFeedRequest +): client = AssetServiceAsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -3345,9 +3781,7 @@ async def test_delete_feed_async(transport: str = 'grpc_asyncio', request_type=a request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_feed), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_feed), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) response = await client.delete_feed(request) @@ -3366,6 +3800,7 @@ async def test_delete_feed_async(transport: str = 'grpc_asyncio', request_type=a async def test_delete_feed_async_from_dict(): await test_delete_feed_async(request_type=dict) + def test_delete_feed_field_headers(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3375,12 +3810,10 @@ def test_delete_feed_field_headers(): # a field header. Set these to a non-empty value. request = asset_service.DeleteFeedRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_feed), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_feed), "__call__") as call: call.return_value = None client.delete_feed(request) @@ -3392,9 +3825,9 @@ def test_delete_feed_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -3407,12 +3840,10 @@ async def test_delete_feed_field_headers_async(): # a field header. Set these to a non-empty value. request = asset_service.DeleteFeedRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_feed), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_feed), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) await client.delete_feed(request) @@ -3424,9 +3855,9 @@ async def test_delete_feed_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] def test_delete_feed_flattened(): @@ -3435,15 +3866,13 @@ def test_delete_feed_flattened(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_feed), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_feed), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.delete_feed( - name='name_value', + name="name_value", ) # Establish that the underlying call was made with the expected @@ -3451,7 +3880,7 @@ def test_delete_feed_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val @@ -3465,9 +3894,10 @@ def test_delete_feed_flattened_error(): with pytest.raises(ValueError): client.delete_feed( asset_service.DeleteFeedRequest(), - name='name_value', + name="name_value", ) + @pytest.mark.asyncio async def test_delete_feed_flattened_async(): client = AssetServiceAsyncClient( @@ -3475,9 +3905,7 @@ async def test_delete_feed_flattened_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_feed), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_feed), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None @@ -3485,7 +3913,7 @@ async def test_delete_feed_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.delete_feed( - name='name_value', + name="name_value", ) # Establish that the underlying call was made with the expected @@ -3493,9 +3921,10 @@ async def test_delete_feed_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val + @pytest.mark.asyncio async def test_delete_feed_flattened_error_async(): client = AssetServiceAsyncClient( @@ -3507,15 +3936,18 @@ async def test_delete_feed_flattened_error_async(): with pytest.raises(ValueError): await client.delete_feed( asset_service.DeleteFeedRequest(), - name='name_value', + name="name_value", ) -@pytest.mark.parametrize("request_type", [ - asset_service.SearchAllResourcesRequest, - dict, -]) -def test_search_all_resources(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + asset_service.SearchAllResourcesRequest, + dict, + ], +) +def test_search_all_resources(request_type, transport: str = "grpc"): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -3527,11 +3959,11 @@ def test_search_all_resources(request_type, transport: str = 'grpc'): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.search_all_resources), - '__call__') as call: + type(client.transport.search_all_resources), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = asset_service.SearchAllResourcesResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", ) response = client.search_all_resources(request) @@ -3543,7 +3975,7 @@ def test_search_all_resources(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, pagers.SearchAllResourcesPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" def test_search_all_resources_non_empty_request_with_auto_populated_field(): @@ -3551,34 +3983,37 @@ def test_search_all_resources_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. 
client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = asset_service.SearchAllResourcesRequest( - scope='scope_value', - query='query_value', - page_token='page_token_value', - order_by='order_by_value', + scope="scope_value", + query="query_value", + page_token="page_token_value", + order_by="order_by_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.search_all_resources), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + type(client.transport.search_all_resources), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.search_all_resources(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == asset_service.SearchAllResourcesRequest( - scope='scope_value', - query='query_value', - page_token='page_token_value', - order_by='order_by_value', + scope="scope_value", + query="query_value", + page_token="page_token_value", + order_by="order_by_value", ) + def test_search_all_resources_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -3593,12 +4028,18 @@ def test_search_all_resources_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.search_all_resources in client._transport._wrapped_methods + assert ( + client._transport.search_all_resources in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.search_all_resources] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.search_all_resources + ] = mock_rpc request = {} client.search_all_resources(request) @@ -3611,8 +4052,11 @@ def test_search_all_resources_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_search_all_resources_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_search_all_resources_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -3626,12 +4070,17 @@ async def test_search_all_resources_async_use_cached_wrapped_rpc(transport: str wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.search_all_resources in client._client._transport._wrapped_methods + assert ( + client._client._transport.search_all_resources + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.search_all_resources] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.search_all_resources + ] = mock_rpc request = {} await client.search_all_resources(request) @@ -3645,8 +4094,12 @@ async def test_search_all_resources_async_use_cached_wrapped_rpc(transport: str assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_search_all_resources_async(transport: str = 'grpc_asyncio', request_type=asset_service.SearchAllResourcesRequest): +async def test_search_all_resources_async( + transport: str = "grpc_asyncio", + request_type=asset_service.SearchAllResourcesRequest, +): client = AssetServiceAsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -3658,12 +4111,14 @@ async def test_search_all_resources_async(transport: str = 'grpc_asyncio', reque # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.search_all_resources), - '__call__') as call: + type(client.transport.search_all_resources), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SearchAllResourcesResponse( - next_page_token='next_page_token_value', - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.SearchAllResourcesResponse( + next_page_token="next_page_token_value", + ) + ) response = await client.search_all_resources(request) # Establish that the underlying gRPC stub method was called. @@ -3674,13 +4129,14 @@ async def test_search_all_resources_async(transport: str = 'grpc_asyncio', reque # Establish that the response is the type that we expect. assert isinstance(response, pagers.SearchAllResourcesAsyncPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" @pytest.mark.asyncio async def test_search_all_resources_async_from_dict(): await test_search_all_resources_async(request_type=dict) + def test_search_all_resources_field_headers(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3690,12 +4146,12 @@ def test_search_all_resources_field_headers(): # a field header. 
Set these to a non-empty value. request = asset_service.SearchAllResourcesRequest() - request.scope = 'scope_value' + request.scope = "scope_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.search_all_resources), - '__call__') as call: + type(client.transport.search_all_resources), "__call__" + ) as call: call.return_value = asset_service.SearchAllResourcesResponse() client.search_all_resources(request) @@ -3707,9 +4163,9 @@ def test_search_all_resources_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'scope=scope_value', - ) in kw['metadata'] + "x-goog-request-params", + "scope=scope_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -3722,13 +4178,15 @@ async def test_search_all_resources_field_headers_async(): # a field header. Set these to a non-empty value. request = asset_service.SearchAllResourcesRequest() - request.scope = 'scope_value' + request.scope = "scope_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.search_all_resources), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SearchAllResourcesResponse()) + type(client.transport.search_all_resources), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.SearchAllResourcesResponse() + ) await client.search_all_resources(request) # Establish that the underlying gRPC stub method was called. @@ -3739,9 +4197,9 @@ async def test_search_all_resources_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'scope=scope_value', - ) in kw['metadata'] + "x-goog-request-params", + "scope=scope_value", + ) in kw["metadata"] def test_search_all_resources_flattened(): @@ -3751,16 +4209,16 @@ def test_search_all_resources_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.search_all_resources), - '__call__') as call: + type(client.transport.search_all_resources), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = asset_service.SearchAllResourcesResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.search_all_resources( - scope='scope_value', - query='query_value', - asset_types=['asset_types_value'], + scope="scope_value", + query="query_value", + asset_types=["asset_types_value"], ) # Establish that the underlying call was made with the expected @@ -3768,13 +4226,13 @@ def test_search_all_resources_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].scope - mock_val = 'scope_value' + mock_val = "scope_value" assert arg == mock_val arg = args[0].query - mock_val = 'query_value' + mock_val = "query_value" assert arg == mock_val arg = args[0].asset_types - mock_val = ['asset_types_value'] + mock_val = ["asset_types_value"] assert arg == mock_val @@ -3788,11 +4246,12 @@ def test_search_all_resources_flattened_error(): with pytest.raises(ValueError): client.search_all_resources( asset_service.SearchAllResourcesRequest(), - scope='scope_value', - query='query_value', - asset_types=['asset_types_value'], + scope="scope_value", + query="query_value", + asset_types=["asset_types_value"], ) + @pytest.mark.asyncio async def test_search_all_resources_flattened_async(): client = AssetServiceAsyncClient( @@ -3801,18 +4260,20 @@ async def test_search_all_resources_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.search_all_resources), - '__call__') as call: + type(client.transport.search_all_resources), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = asset_service.SearchAllResourcesResponse() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SearchAllResourcesResponse()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.SearchAllResourcesResponse() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.search_all_resources( - scope='scope_value', - query='query_value', - asset_types=['asset_types_value'], + scope="scope_value", + query="query_value", + asset_types=["asset_types_value"], ) # Establish that the underlying call was made with the expected @@ -3820,15 +4281,16 @@ async def test_search_all_resources_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].scope - mock_val = 'scope_value' + mock_val = "scope_value" assert arg == mock_val arg = args[0].query - mock_val = 'query_value' + mock_val = "query_value" assert arg == mock_val arg = args[0].asset_types - mock_val = ['asset_types_value'] + mock_val = ["asset_types_value"] assert arg == mock_val + @pytest.mark.asyncio async def test_search_all_resources_flattened_error_async(): client = AssetServiceAsyncClient( @@ -3840,9 +4302,9 @@ async def test_search_all_resources_flattened_error_async(): with pytest.raises(ValueError): await client.search_all_resources( asset_service.SearchAllResourcesRequest(), - scope='scope_value', - query='query_value', - asset_types=['asset_types_value'], + scope="scope_value", + query="query_value", + asset_types=["asset_types_value"], ) @@ -3854,8 +4316,8 @@ def test_search_all_resources_pager(transport_name: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.search_all_resources), - '__call__') as call: + type(client.transport.search_all_resources), "__call__" + ) as call: # Set the response to a series of pages. 
call.side_effect = ( asset_service.SearchAllResourcesResponse( @@ -3864,17 +4326,17 @@ def test_search_all_resources_pager(transport_name: str = "grpc"): assets.ResourceSearchResult(), assets.ResourceSearchResult(), ], - next_page_token='abc', + next_page_token="abc", ), asset_service.SearchAllResourcesResponse( results=[], - next_page_token='def', + next_page_token="def", ), asset_service.SearchAllResourcesResponse( results=[ assets.ResourceSearchResult(), ], - next_page_token='ghi', + next_page_token="ghi", ), asset_service.SearchAllResourcesResponse( results=[ @@ -3889,9 +4351,7 @@ def test_search_all_resources_pager(transport_name: str = "grpc"): retry = retries.Retry() timeout = 5 expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('scope', ''), - )), + gapic_v1.routing_header.to_grpc_metadata((("scope", ""),)), ) pager = client.search_all_resources(request={}, retry=retry, timeout=timeout) @@ -3901,8 +4361,9 @@ def test_search_all_resources_pager(transport_name: str = "grpc"): results = list(pager) assert len(results) == 6 - assert all(isinstance(i, assets.ResourceSearchResult) - for i in results) + assert all(isinstance(i, assets.ResourceSearchResult) for i in results) + + def test_search_all_resources_pages(transport_name: str = "grpc"): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3911,8 +4372,8 @@ def test_search_all_resources_pages(transport_name: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.search_all_resources), - '__call__') as call: + type(client.transport.search_all_resources), "__call__" + ) as call: # Set the response to a series of pages. call.side_effect = ( asset_service.SearchAllResourcesResponse( @@ -3921,17 +4382,17 @@ def test_search_all_resources_pages(transport_name: str = "grpc"): assets.ResourceSearchResult(), assets.ResourceSearchResult(), ], - next_page_token='abc', + next_page_token="abc", ), asset_service.SearchAllResourcesResponse( results=[], - next_page_token='def', + next_page_token="def", ), asset_service.SearchAllResourcesResponse( results=[ assets.ResourceSearchResult(), ], - next_page_token='ghi', + next_page_token="ghi", ), asset_service.SearchAllResourcesResponse( results=[ @@ -3942,9 +4403,10 @@ def test_search_all_resources_pages(transport_name: str = "grpc"): RuntimeError, ) pages = list(client.search_all_resources(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token + @pytest.mark.asyncio async def test_search_all_resources_async_pager(): client = AssetServiceAsyncClient( @@ -3953,8 +4415,10 @@ async def test_search_all_resources_async_pager(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.search_all_resources), - '__call__', new_callable=mock.AsyncMock) as call: + type(client.transport.search_all_resources), + "__call__", + new_callable=mock.AsyncMock, + ) as call: # Set the response to a series of pages. 
call.side_effect = ( asset_service.SearchAllResourcesResponse( @@ -3963,17 +4427,17 @@ async def test_search_all_resources_async_pager(): assets.ResourceSearchResult(), assets.ResourceSearchResult(), ], - next_page_token='abc', + next_page_token="abc", ), asset_service.SearchAllResourcesResponse( results=[], - next_page_token='def', + next_page_token="def", ), asset_service.SearchAllResourcesResponse( results=[ assets.ResourceSearchResult(), ], - next_page_token='ghi', + next_page_token="ghi", ), asset_service.SearchAllResourcesResponse( results=[ @@ -3983,15 +4447,16 @@ async def test_search_all_resources_async_pager(): ), RuntimeError, ) - async_pager = await client.search_all_resources(request={},) - assert async_pager.next_page_token == 'abc' + async_pager = await client.search_all_resources( + request={}, + ) + assert async_pager.next_page_token == "abc" responses = [] - async for response in async_pager: # pragma: no branch + async for response in async_pager: # pragma: no branch responses.append(response) assert len(responses) == 6 - assert all(isinstance(i, assets.ResourceSearchResult) - for i in responses) + assert all(isinstance(i, assets.ResourceSearchResult) for i in responses) @pytest.mark.asyncio @@ -4002,8 +4467,10 @@ async def test_search_all_resources_async_pages(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.search_all_resources), - '__call__', new_callable=mock.AsyncMock) as call: + type(client.transport.search_all_resources), + "__call__", + new_callable=mock.AsyncMock, + ) as call: # Set the response to a series of pages. call.side_effect = ( asset_service.SearchAllResourcesResponse( @@ -4012,17 +4479,17 @@ async def test_search_all_resources_async_pages(): assets.ResourceSearchResult(), assets.ResourceSearchResult(), ], - next_page_token='abc', + next_page_token="abc", ), asset_service.SearchAllResourcesResponse( results=[], - next_page_token='def', + next_page_token="def", ), asset_service.SearchAllResourcesResponse( results=[ assets.ResourceSearchResult(), ], - next_page_token='ghi', + next_page_token="ghi", ), asset_service.SearchAllResourcesResponse( results=[ @@ -4035,18 +4502,22 @@ async def test_search_all_resources_async_pages(): pages = [] # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch + async for page_ in ( # pragma: no branch await client.search_all_resources(request={}) ).pages: pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize("request_type", [ - asset_service.SearchAllIamPoliciesRequest, - dict, -]) -def test_search_all_iam_policies(request_type, transport: str = 'grpc'): + +@pytest.mark.parametrize( + "request_type", + [ + asset_service.SearchAllIamPoliciesRequest, + dict, + ], +) +def test_search_all_iam_policies(request_type, transport: str = "grpc"): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -4058,11 +4529,11 @@ def test_search_all_iam_policies(request_type, transport: str = 'grpc'): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.search_all_iam_policies), - '__call__') as call: + type(client.transport.search_all_iam_policies), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = asset_service.SearchAllIamPoliciesResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", ) response = client.search_all_iam_policies(request) @@ -4074,7 +4545,7 @@ def test_search_all_iam_policies(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, pagers.SearchAllIamPoliciesPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" def test_search_all_iam_policies_non_empty_request_with_auto_populated_field(): @@ -4082,34 +4553,37 @@ def test_search_all_iam_policies_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = asset_service.SearchAllIamPoliciesRequest( - scope='scope_value', - query='query_value', - page_token='page_token_value', - order_by='order_by_value', + scope="scope_value", + query="query_value", + page_token="page_token_value", + order_by="order_by_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.search_all_iam_policies), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + type(client.transport.search_all_iam_policies), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.search_all_iam_policies(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == asset_service.SearchAllIamPoliciesRequest( - scope='scope_value', - query='query_value', - page_token='page_token_value', - order_by='order_by_value', + scope="scope_value", + query="query_value", + page_token="page_token_value", + order_by="order_by_value", ) + def test_search_all_iam_policies_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -4124,12 +4598,19 @@ def test_search_all_iam_policies_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.search_all_iam_policies in client._transport._wrapped_methods + assert ( + client._transport.search_all_iam_policies + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.search_all_iam_policies] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.search_all_iam_policies + ] = mock_rpc request = {} client.search_all_iam_policies(request) @@ -4142,8 +4623,11 @@ def test_search_all_iam_policies_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_search_all_iam_policies_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_search_all_iam_policies_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -4157,12 +4641,17 @@ async def test_search_all_iam_policies_async_use_cached_wrapped_rpc(transport: s wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.search_all_iam_policies in client._client._transport._wrapped_methods + assert ( + client._client._transport.search_all_iam_policies + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.search_all_iam_policies] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.search_all_iam_policies + ] = mock_rpc request = {} await client.search_all_iam_policies(request) @@ -4176,8 +4665,12 @@ async def test_search_all_iam_policies_async_use_cached_wrapped_rpc(transport: s assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_search_all_iam_policies_async(transport: str = 'grpc_asyncio', request_type=asset_service.SearchAllIamPoliciesRequest): +async def test_search_all_iam_policies_async( + transport: str = "grpc_asyncio", + request_type=asset_service.SearchAllIamPoliciesRequest, +): client = AssetServiceAsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -4189,12 +4682,14 @@ async def test_search_all_iam_policies_async(transport: str = 'grpc_asyncio', re # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.search_all_iam_policies), - '__call__') as call: + type(client.transport.search_all_iam_policies), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SearchAllIamPoliciesResponse( - next_page_token='next_page_token_value', - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.SearchAllIamPoliciesResponse( + next_page_token="next_page_token_value", + ) + ) response = await client.search_all_iam_policies(request) # Establish that the underlying gRPC stub method was called. @@ -4205,13 +4700,14 @@ async def test_search_all_iam_policies_async(transport: str = 'grpc_asyncio', re # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.SearchAllIamPoliciesAsyncPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" @pytest.mark.asyncio async def test_search_all_iam_policies_async_from_dict(): await test_search_all_iam_policies_async(request_type=dict) + def test_search_all_iam_policies_field_headers(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4221,12 +4717,12 @@ def test_search_all_iam_policies_field_headers(): # a field header. Set these to a non-empty value. request = asset_service.SearchAllIamPoliciesRequest() - request.scope = 'scope_value' + request.scope = "scope_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.search_all_iam_policies), - '__call__') as call: + type(client.transport.search_all_iam_policies), "__call__" + ) as call: call.return_value = asset_service.SearchAllIamPoliciesResponse() client.search_all_iam_policies(request) @@ -4238,9 +4734,9 @@ def test_search_all_iam_policies_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'scope=scope_value', - ) in kw['metadata'] + "x-goog-request-params", + "scope=scope_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -4253,13 +4749,15 @@ async def test_search_all_iam_policies_field_headers_async(): # a field header. Set these to a non-empty value. request = asset_service.SearchAllIamPoliciesRequest() - request.scope = 'scope_value' + request.scope = "scope_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.search_all_iam_policies), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SearchAllIamPoliciesResponse()) + type(client.transport.search_all_iam_policies), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.SearchAllIamPoliciesResponse() + ) await client.search_all_iam_policies(request) # Establish that the underlying gRPC stub method was called. @@ -4270,9 +4768,9 @@ async def test_search_all_iam_policies_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'scope=scope_value', - ) in kw['metadata'] + "x-goog-request-params", + "scope=scope_value", + ) in kw["metadata"] def test_search_all_iam_policies_flattened(): @@ -4282,15 +4780,15 @@ def test_search_all_iam_policies_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.search_all_iam_policies), - '__call__') as call: + type(client.transport.search_all_iam_policies), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = asset_service.SearchAllIamPoliciesResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.search_all_iam_policies( - scope='scope_value', - query='query_value', + scope="scope_value", + query="query_value", ) # Establish that the underlying call was made with the expected @@ -4298,10 +4796,10 @@ def test_search_all_iam_policies_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].scope - mock_val = 'scope_value' + mock_val = "scope_value" assert arg == mock_val arg = args[0].query - mock_val = 'query_value' + mock_val = "query_value" assert arg == mock_val @@ -4315,10 +4813,11 @@ def test_search_all_iam_policies_flattened_error(): with pytest.raises(ValueError): client.search_all_iam_policies( asset_service.SearchAllIamPoliciesRequest(), - scope='scope_value', - query='query_value', + scope="scope_value", + query="query_value", ) + @pytest.mark.asyncio async def test_search_all_iam_policies_flattened_async(): client = AssetServiceAsyncClient( @@ -4327,17 +4826,19 @@ async def test_search_all_iam_policies_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.search_all_iam_policies), - '__call__') as call: + type(client.transport.search_all_iam_policies), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = asset_service.SearchAllIamPoliciesResponse() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SearchAllIamPoliciesResponse()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.SearchAllIamPoliciesResponse() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.search_all_iam_policies( - scope='scope_value', - query='query_value', + scope="scope_value", + query="query_value", ) # Establish that the underlying call was made with the expected @@ -4345,12 +4846,13 @@ async def test_search_all_iam_policies_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].scope - mock_val = 'scope_value' + mock_val = "scope_value" assert arg == mock_val arg = args[0].query - mock_val = 'query_value' + mock_val = "query_value" assert arg == mock_val + @pytest.mark.asyncio async def test_search_all_iam_policies_flattened_error_async(): client = AssetServiceAsyncClient( @@ -4362,8 +4864,8 @@ async def test_search_all_iam_policies_flattened_error_async(): with pytest.raises(ValueError): await client.search_all_iam_policies( asset_service.SearchAllIamPoliciesRequest(), - scope='scope_value', - query='query_value', + scope="scope_value", + query="query_value", ) @@ -4375,8 +4877,8 @@ def test_search_all_iam_policies_pager(transport_name: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.search_all_iam_policies), - '__call__') as call: + type(client.transport.search_all_iam_policies), "__call__" + ) as call: # Set the response to a series of pages. 
call.side_effect = ( asset_service.SearchAllIamPoliciesResponse( @@ -4385,17 +4887,17 @@ def test_search_all_iam_policies_pager(transport_name: str = "grpc"): assets.IamPolicySearchResult(), assets.IamPolicySearchResult(), ], - next_page_token='abc', + next_page_token="abc", ), asset_service.SearchAllIamPoliciesResponse( results=[], - next_page_token='def', + next_page_token="def", ), asset_service.SearchAllIamPoliciesResponse( results=[ assets.IamPolicySearchResult(), ], - next_page_token='ghi', + next_page_token="ghi", ), asset_service.SearchAllIamPoliciesResponse( results=[ @@ -4410,9 +4912,7 @@ def test_search_all_iam_policies_pager(transport_name: str = "grpc"): retry = retries.Retry() timeout = 5 expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('scope', ''), - )), + gapic_v1.routing_header.to_grpc_metadata((("scope", ""),)), ) pager = client.search_all_iam_policies(request={}, retry=retry, timeout=timeout) @@ -4422,8 +4922,9 @@ def test_search_all_iam_policies_pager(transport_name: str = "grpc"): results = list(pager) assert len(results) == 6 - assert all(isinstance(i, assets.IamPolicySearchResult) - for i in results) + assert all(isinstance(i, assets.IamPolicySearchResult) for i in results) + + def test_search_all_iam_policies_pages(transport_name: str = "grpc"): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4432,8 +4933,8 @@ def test_search_all_iam_policies_pages(transport_name: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.search_all_iam_policies), - '__call__') as call: + type(client.transport.search_all_iam_policies), "__call__" + ) as call: # Set the response to a series of pages. call.side_effect = ( asset_service.SearchAllIamPoliciesResponse( @@ -4442,17 +4943,17 @@ def test_search_all_iam_policies_pages(transport_name: str = "grpc"): assets.IamPolicySearchResult(), assets.IamPolicySearchResult(), ], - next_page_token='abc', + next_page_token="abc", ), asset_service.SearchAllIamPoliciesResponse( results=[], - next_page_token='def', + next_page_token="def", ), asset_service.SearchAllIamPoliciesResponse( results=[ assets.IamPolicySearchResult(), ], - next_page_token='ghi', + next_page_token="ghi", ), asset_service.SearchAllIamPoliciesResponse( results=[ @@ -4463,9 +4964,10 @@ def test_search_all_iam_policies_pages(transport_name: str = "grpc"): RuntimeError, ) pages = list(client.search_all_iam_policies(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token + @pytest.mark.asyncio async def test_search_all_iam_policies_async_pager(): client = AssetServiceAsyncClient( @@ -4474,8 +4976,10 @@ async def test_search_all_iam_policies_async_pager(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.search_all_iam_policies), - '__call__', new_callable=mock.AsyncMock) as call: + type(client.transport.search_all_iam_policies), + "__call__", + new_callable=mock.AsyncMock, + ) as call: # Set the response to a series of pages. 
call.side_effect = ( asset_service.SearchAllIamPoliciesResponse( @@ -4484,17 +4988,17 @@ async def test_search_all_iam_policies_async_pager(): assets.IamPolicySearchResult(), assets.IamPolicySearchResult(), ], - next_page_token='abc', + next_page_token="abc", ), asset_service.SearchAllIamPoliciesResponse( results=[], - next_page_token='def', + next_page_token="def", ), asset_service.SearchAllIamPoliciesResponse( results=[ assets.IamPolicySearchResult(), ], - next_page_token='ghi', + next_page_token="ghi", ), asset_service.SearchAllIamPoliciesResponse( results=[ @@ -4504,15 +5008,16 @@ async def test_search_all_iam_policies_async_pager(): ), RuntimeError, ) - async_pager = await client.search_all_iam_policies(request={},) - assert async_pager.next_page_token == 'abc' + async_pager = await client.search_all_iam_policies( + request={}, + ) + assert async_pager.next_page_token == "abc" responses = [] - async for response in async_pager: # pragma: no branch + async for response in async_pager: # pragma: no branch responses.append(response) assert len(responses) == 6 - assert all(isinstance(i, assets.IamPolicySearchResult) - for i in responses) + assert all(isinstance(i, assets.IamPolicySearchResult) for i in responses) @pytest.mark.asyncio @@ -4523,8 +5028,10 @@ async def test_search_all_iam_policies_async_pages(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.search_all_iam_policies), - '__call__', new_callable=mock.AsyncMock) as call: + type(client.transport.search_all_iam_policies), + "__call__", + new_callable=mock.AsyncMock, + ) as call: # Set the response to a series of pages. call.side_effect = ( asset_service.SearchAllIamPoliciesResponse( @@ -4533,17 +5040,17 @@ async def test_search_all_iam_policies_async_pages(): assets.IamPolicySearchResult(), assets.IamPolicySearchResult(), ], - next_page_token='abc', + next_page_token="abc", ), asset_service.SearchAllIamPoliciesResponse( results=[], - next_page_token='def', + next_page_token="def", ), asset_service.SearchAllIamPoliciesResponse( results=[ assets.IamPolicySearchResult(), ], - next_page_token='ghi', + next_page_token="ghi", ), asset_service.SearchAllIamPoliciesResponse( results=[ @@ -4556,18 +5063,22 @@ async def test_search_all_iam_policies_async_pages(): pages = [] # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch + async for page_ in ( # pragma: no branch await client.search_all_iam_policies(request={}) ).pages: pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize("request_type", [ - asset_service.AnalyzeIamPolicyRequest, - dict, -]) -def test_analyze_iam_policy(request_type, transport: str = 'grpc'): + +@pytest.mark.parametrize( + "request_type", + [ + asset_service.AnalyzeIamPolicyRequest, + dict, + ], +) +def test_analyze_iam_policy(request_type, transport: str = "grpc"): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -4579,8 +5090,8 @@ def test_analyze_iam_policy(request_type, transport: str = 'grpc'): # Mock the actual call within the gRPC stub, and fake the request. 
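The pager tests drive the stub with `side_effect` so every invocation returns the next page, and the pager is expected to keep fetching until it sees an empty `next_page_token`. A stripped-down version of that contract, using plain dictionaries instead of `SearchAllIamPoliciesResponse` (all names illustrative):

# Editorial sketch only -- same page-walking contract, standard library only.
from unittest import mock


def iterate_pages(fetch):
    """Yield pages from ``fetch`` until a page carries no next_page_token."""
    token = None
    while True:
        page = fetch(page_token=token)
        yield page
        token = page["next_page_token"]
        if not token:
            return


def test_pager_walks_every_page():
    fetch = mock.Mock(
        side_effect=[
            {"results": [1, 2, 3], "next_page_token": "abc"},
            {"results": [], "next_page_token": "def"},
            {"results": [4], "next_page_token": "ghi"},
            {"results": [5, 6], "next_page_token": ""},
        ]
    )
    pages = list(iterate_pages(fetch))
    for page, token in zip(pages, ["abc", "def", "ghi", ""]):
        assert page["next_page_token"] == token
    assert sum(len(p["results"]) for p in pages) == 6
    assert fetch.call_count == 4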
with mock.patch.object( - type(client.transport.analyze_iam_policy), - '__call__') as call: + type(client.transport.analyze_iam_policy), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = asset_service.AnalyzeIamPolicyResponse( fully_explored=True, @@ -4603,28 +5114,31 @@ def test_analyze_iam_policy_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = asset_service.AnalyzeIamPolicyRequest( - saved_analysis_query='saved_analysis_query_value', + saved_analysis_query="saved_analysis_query_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.analyze_iam_policy), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + type(client.transport.analyze_iam_policy), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.analyze_iam_policy(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == asset_service.AnalyzeIamPolicyRequest( - saved_analysis_query='saved_analysis_query_value', + saved_analysis_query="saved_analysis_query_value", ) + def test_analyze_iam_policy_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -4639,12 +5153,18 @@ def test_analyze_iam_policy_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.analyze_iam_policy in client._transport._wrapped_methods + assert ( + client._transport.analyze_iam_policy in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.analyze_iam_policy] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.analyze_iam_policy + ] = mock_rpc request = {} client.analyze_iam_policy(request) @@ -4657,8 +5177,11 @@ def test_analyze_iam_policy_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_analyze_iam_policy_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_analyze_iam_policy_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -4672,12 +5195,17 @@ async def test_analyze_iam_policy_async_use_cached_wrapped_rpc(transport: str = wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.analyze_iam_policy in client._client._transport._wrapped_methods + assert ( + client._client._transport.analyze_iam_policy + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.analyze_iam_policy] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.analyze_iam_policy + ] = mock_rpc request = {} await client.analyze_iam_policy(request) @@ -4691,8 +5219,11 @@ async def test_analyze_iam_policy_async_use_cached_wrapped_rpc(transport: str = assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_analyze_iam_policy_async(transport: str = 'grpc_asyncio', request_type=asset_service.AnalyzeIamPolicyRequest): +async def test_analyze_iam_policy_async( + transport: str = "grpc_asyncio", request_type=asset_service.AnalyzeIamPolicyRequest +): client = AssetServiceAsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -4704,12 +5235,14 @@ async def test_analyze_iam_policy_async(transport: str = 'grpc_asyncio', request # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.analyze_iam_policy), - '__call__') as call: + type(client.transport.analyze_iam_policy), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(asset_service.AnalyzeIamPolicyResponse( - fully_explored=True, - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.AnalyzeIamPolicyResponse( + fully_explored=True, + ) + ) response = await client.analyze_iam_policy(request) # Establish that the underlying gRPC stub method was called. @@ -4727,6 +5260,7 @@ async def test_analyze_iam_policy_async(transport: str = 'grpc_asyncio', request async def test_analyze_iam_policy_async_from_dict(): await test_analyze_iam_policy_async(request_type=dict) + def test_analyze_iam_policy_field_headers(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4736,12 +5270,12 @@ def test_analyze_iam_policy_field_headers(): # a field header. Set these to a non-empty value. request = asset_service.AnalyzeIamPolicyRequest() - request.analysis_query.scope = 'scope_value' + request.analysis_query.scope = "scope_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.analyze_iam_policy), - '__call__') as call: + type(client.transport.analyze_iam_policy), "__call__" + ) as call: call.return_value = asset_service.AnalyzeIamPolicyResponse() client.analyze_iam_policy(request) @@ -4753,9 +5287,9 @@ def test_analyze_iam_policy_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'analysis_query.scope=scope_value', - ) in kw['metadata'] + "x-goog-request-params", + "analysis_query.scope=scope_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -4768,13 +5302,15 @@ async def test_analyze_iam_policy_field_headers_async(): # a field header. Set these to a non-empty value. request = asset_service.AnalyzeIamPolicyRequest() - request.analysis_query.scope = 'scope_value' + request.analysis_query.scope = "scope_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.analyze_iam_policy), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.AnalyzeIamPolicyResponse()) + type(client.transport.analyze_iam_policy), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.AnalyzeIamPolicyResponse() + ) await client.analyze_iam_policy(request) # Establish that the underlying gRPC stub method was called. @@ -4785,16 +5321,19 @@ async def test_analyze_iam_policy_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'analysis_query.scope=scope_value', - ) in kw['metadata'] + "x-goog-request-params", + "analysis_query.scope=scope_value", + ) in kw["metadata"] -@pytest.mark.parametrize("request_type", [ - asset_service.AnalyzeIamPolicyLongrunningRequest, - dict, -]) -def test_analyze_iam_policy_longrunning(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + asset_service.AnalyzeIamPolicyLongrunningRequest, + dict, + ], +) +def test_analyze_iam_policy_longrunning(request_type, transport: str = "grpc"): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -4806,10 +5345,10 @@ def test_analyze_iam_policy_longrunning(request_type, transport: str = 'grpc'): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.analyze_iam_policy_longrunning), - '__call__') as call: + type(client.transport.analyze_iam_policy_longrunning), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') + call.return_value = operations_pb2.Operation(name="operations/spam") response = client.analyze_iam_policy_longrunning(request) # Establish that the underlying gRPC stub method was called. @@ -4827,28 +5366,31 @@ def test_analyze_iam_policy_longrunning_non_empty_request_with_auto_populated_fi # automatically populated, according to AIP-4235, with non-empty requests. client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. 
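The `field_headers` tests assert that the request's routing field is echoed back as an `x-goog-request-params` metadata entry, which the generated code builds with `gapic_v1.routing_header.to_grpc_metadata`. The expected value is just a URL-style `key=value` string; the sketch below reproduces that check with the standard library only, and `routing_metadata` is an illustrative helper, not the real one.

# Editorial sketch only -- approximates the routing-header check with urlencode.
from urllib.parse import urlencode


def routing_metadata(params):
    """Build an x-goog-request-params metadata entry from (field, value) pairs."""
    return ("x-goog-request-params", urlencode(params))


def test_routing_header_matches_request_field():
    metadata = [routing_metadata([("analysis_query.scope", "scope_value")])]
    assert (
        "x-goog-request-params",
        "analysis_query.scope=scope_value",
    ) in metadata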
request = asset_service.AnalyzeIamPolicyLongrunningRequest( - saved_analysis_query='saved_analysis_query_value', + saved_analysis_query="saved_analysis_query_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.analyze_iam_policy_longrunning), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + type(client.transport.analyze_iam_policy_longrunning), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.analyze_iam_policy_longrunning(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == asset_service.AnalyzeIamPolicyLongrunningRequest( - saved_analysis_query='saved_analysis_query_value', + saved_analysis_query="saved_analysis_query_value", ) + def test_analyze_iam_policy_longrunning_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -4863,12 +5405,19 @@ def test_analyze_iam_policy_longrunning_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.analyze_iam_policy_longrunning in client._transport._wrapped_methods + assert ( + client._transport.analyze_iam_policy_longrunning + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.analyze_iam_policy_longrunning] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.analyze_iam_policy_longrunning + ] = mock_rpc request = {} client.analyze_iam_policy_longrunning(request) @@ -4886,8 +5435,11 @@ def test_analyze_iam_policy_longrunning_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_analyze_iam_policy_longrunning_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_analyze_iam_policy_longrunning_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -4901,12 +5453,17 @@ async def test_analyze_iam_policy_longrunning_async_use_cached_wrapped_rpc(trans wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.analyze_iam_policy_longrunning in client._client._transport._wrapped_methods + assert ( + client._client._transport.analyze_iam_policy_longrunning + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.analyze_iam_policy_longrunning] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.analyze_iam_policy_longrunning + ] = mock_rpc request = {} await client.analyze_iam_policy_longrunning(request) @@ -4925,8 +5482,12 @@ async def test_analyze_iam_policy_longrunning_async_use_cached_wrapped_rpc(trans assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_analyze_iam_policy_longrunning_async(transport: str = 'grpc_asyncio', request_type=asset_service.AnalyzeIamPolicyLongrunningRequest): +async def test_analyze_iam_policy_longrunning_async( + transport: str = "grpc_asyncio", + request_type=asset_service.AnalyzeIamPolicyLongrunningRequest, +): client = AssetServiceAsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -4938,11 +5499,11 @@ async def test_analyze_iam_policy_longrunning_async(transport: str = 'grpc_async # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.analyze_iam_policy_longrunning), - '__call__') as call: + type(client.transport.analyze_iam_policy_longrunning), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') + operations_pb2.Operation(name="operations/spam") ) response = await client.analyze_iam_policy_longrunning(request) @@ -4960,6 +5521,7 @@ async def test_analyze_iam_policy_longrunning_async(transport: str = 'grpc_async async def test_analyze_iam_policy_longrunning_async_from_dict(): await test_analyze_iam_policy_longrunning_async(request_type=dict) + def test_analyze_iam_policy_longrunning_field_headers(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4969,13 +5531,13 @@ def test_analyze_iam_policy_longrunning_field_headers(): # a field header. Set these to a non-empty value. request = asset_service.AnalyzeIamPolicyLongrunningRequest() - request.analysis_query.scope = 'scope_value' + request.analysis_query.scope = "scope_value" # Mock the actual call within the gRPC stub, and fake the request. 
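The `use_cached_wrapped_rpc` tests check two things: the transport exposes the RPC through a `_wrapped_methods` cache, and a second call reuses the cached entry instead of re-wrapping. A minimal cache with the same observable behaviour; every name here is illustrative, not the generated transport API.

# Editorial sketch only -- wrap-once caching, checked the same way as above.
from unittest import mock


class CachingTransport:
    def __init__(self, wrap):
        self._wrap = wrap
        self._wrapped_methods = {}

    def call(self, method, request):
        # Wrap each RPC at most once; later calls hit the cache.
        if method not in self._wrapped_methods:
            self._wrapped_methods[method] = self._wrap(method)
        return self._wrapped_methods[method](request)


def test_wrapping_happens_once_per_method():
    wrapper_fn = mock.Mock(
        side_effect=lambda method: mock.Mock(name=f"wrapped-{method}")
    )
    transport = CachingTransport(wrapper_fn)

    transport.call("analyze_iam_policy", request={})
    transport.call("analyze_iam_policy", request={})

    assert "analyze_iam_policy" in transport._wrapped_methods
    assert wrapper_fn.call_count == 1
    assert transport._wrapped_methods["analyze_iam_policy"].call_count == 2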
with mock.patch.object( - type(client.transport.analyze_iam_policy_longrunning), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') + type(client.transport.analyze_iam_policy_longrunning), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") client.analyze_iam_policy_longrunning(request) # Establish that the underlying gRPC stub method was called. @@ -4986,9 +5548,9 @@ def test_analyze_iam_policy_longrunning_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'analysis_query.scope=scope_value', - ) in kw['metadata'] + "x-goog-request-params", + "analysis_query.scope=scope_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -5001,13 +5563,15 @@ async def test_analyze_iam_policy_longrunning_field_headers_async(): # a field header. Set these to a non-empty value. request = asset_service.AnalyzeIamPolicyLongrunningRequest() - request.analysis_query.scope = 'scope_value' + request.analysis_query.scope = "scope_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.analyze_iam_policy_longrunning), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + type(client.transport.analyze_iam_policy_longrunning), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) await client.analyze_iam_policy_longrunning(request) # Establish that the underlying gRPC stub method was called. @@ -5018,16 +5582,19 @@ async def test_analyze_iam_policy_longrunning_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'analysis_query.scope=scope_value', - ) in kw['metadata'] + "x-goog-request-params", + "analysis_query.scope=scope_value", + ) in kw["metadata"] -@pytest.mark.parametrize("request_type", [ - asset_service.AnalyzeMoveRequest, - dict, -]) -def test_analyze_move(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + asset_service.AnalyzeMoveRequest, + dict, + ], +) +def test_analyze_move(request_type, transport: str = "grpc"): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5038,12 +5605,9 @@ def test_analyze_move(request_type, transport: str = 'grpc'): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.analyze_move), - '__call__') as call: + with mock.patch.object(type(client.transport.analyze_move), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = asset_service.AnalyzeMoveResponse( - ) + call.return_value = asset_service.AnalyzeMoveResponse() response = client.analyze_move(request) # Establish that the underlying gRPC stub method was called. @@ -5061,30 +5625,31 @@ def test_analyze_move_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. 
request = asset_service.AnalyzeMoveRequest( - resource='resource_value', - destination_parent='destination_parent_value', + resource="resource_value", + destination_parent="destination_parent_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.analyze_move), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + with mock.patch.object(type(client.transport.analyze_move), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.analyze_move(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == asset_service.AnalyzeMoveRequest( - resource='resource_value', - destination_parent='destination_parent_value', + resource="resource_value", + destination_parent="destination_parent_value", ) + def test_analyze_move_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -5103,7 +5668,9 @@ def test_analyze_move_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._transport._wrapped_methods[client._transport.analyze_move] = mock_rpc request = {} client.analyze_move(request) @@ -5117,8 +5684,11 @@ def test_analyze_move_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_analyze_move_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_analyze_move_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -5132,12 +5702,17 @@ async def test_analyze_move_async_use_cached_wrapped_rpc(transport: str = "grpc_ wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.analyze_move in client._client._transport._wrapped_methods + assert ( + client._client._transport.analyze_move + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.analyze_move] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.analyze_move + ] = mock_rpc request = {} await client.analyze_move(request) @@ -5151,8 +5726,11 @@ async def test_analyze_move_async_use_cached_wrapped_rpc(transport: str = "grpc_ assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_analyze_move_async(transport: str = 'grpc_asyncio', request_type=asset_service.AnalyzeMoveRequest): +async def test_analyze_move_async( + transport: str = "grpc_asyncio", request_type=asset_service.AnalyzeMoveRequest +): client = AssetServiceAsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -5163,12 +5741,11 @@ async def test_analyze_move_async(transport: str = 'grpc_asyncio', request_type= request = request_type() # Mock the actual call within the 
gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.analyze_move), - '__call__') as call: + with mock.patch.object(type(client.transport.analyze_move), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(asset_service.AnalyzeMoveResponse( - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.AnalyzeMoveResponse() + ) response = await client.analyze_move(request) # Establish that the underlying gRPC stub method was called. @@ -5185,6 +5762,7 @@ async def test_analyze_move_async(transport: str = 'grpc_asyncio', request_type= async def test_analyze_move_async_from_dict(): await test_analyze_move_async(request_type=dict) + def test_analyze_move_field_headers(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5194,12 +5772,10 @@ def test_analyze_move_field_headers(): # a field header. Set these to a non-empty value. request = asset_service.AnalyzeMoveRequest() - request.resource = 'resource_value' + request.resource = "resource_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.analyze_move), - '__call__') as call: + with mock.patch.object(type(client.transport.analyze_move), "__call__") as call: call.return_value = asset_service.AnalyzeMoveResponse() client.analyze_move(request) @@ -5211,9 +5787,9 @@ def test_analyze_move_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'resource=resource_value', - ) in kw['metadata'] + "x-goog-request-params", + "resource=resource_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -5226,13 +5802,13 @@ async def test_analyze_move_field_headers_async(): # a field header. Set these to a non-empty value. request = asset_service.AnalyzeMoveRequest() - request.resource = 'resource_value' + request.resource = "resource_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.analyze_move), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.AnalyzeMoveResponse()) + with mock.patch.object(type(client.transport.analyze_move), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.AnalyzeMoveResponse() + ) await client.analyze_move(request) # Establish that the underlying gRPC stub method was called. @@ -5243,16 +5819,19 @@ async def test_analyze_move_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'resource=resource_value', - ) in kw['metadata'] + "x-goog-request-params", + "resource=resource_value", + ) in kw["metadata"] -@pytest.mark.parametrize("request_type", [ - asset_service.QueryAssetsRequest, - dict, -]) -def test_query_assets(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + asset_service.QueryAssetsRequest, + dict, + ], +) +def test_query_assets(request_type, transport: str = "grpc"): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5263,12 +5842,10 @@ def test_query_assets(request_type, transport: str = 'grpc'): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
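The async variants patch the stub the same way but must hand back an awaitable, which the generated tests do with `grpc_helpers_async.FakeUnaryUnaryCall`. The same effect can be had in a standalone sketch with `mock.AsyncMock`; `FakeAsyncTransport` is an illustrative stand-in.

# Editorial sketch only -- awaitable stubbing via AsyncMock, standard library only.
import asyncio
from unittest import mock


class FakeAsyncTransport:
    async def analyze_move(self, request):
        raise NotImplementedError("patched out in the test")


def test_async_stub_is_awaited():
    transport = FakeAsyncTransport()
    with mock.patch.object(
        transport, "analyze_move", new_callable=mock.AsyncMock
    ) as call:
        call.return_value = {"move_analysis": []}
        response = asyncio.run(transport.analyze_move({"resource": "resource_value"}))

    call.assert_awaited_once_with({"resource": "resource_value"})
    assert response == {"move_analysis": []}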
- with mock.patch.object( - type(client.transport.query_assets), - '__call__') as call: + with mock.patch.object(type(client.transport.query_assets), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = asset_service.QueryAssetsResponse( - job_reference='job_reference_value', + job_reference="job_reference_value", done=True, ) response = client.query_assets(request) @@ -5281,7 +5858,7 @@ def test_query_assets(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, asset_service.QueryAssetsResponse) - assert response.job_reference == 'job_reference_value' + assert response.job_reference == "job_reference_value" assert response.done is True @@ -5290,34 +5867,35 @@ def test_query_assets_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = asset_service.QueryAssetsRequest( - parent='parent_value', - statement='statement_value', - job_reference='job_reference_value', - page_token='page_token_value', + parent="parent_value", + statement="statement_value", + job_reference="job_reference_value", + page_token="page_token_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.query_assets), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + with mock.patch.object(type(client.transport.query_assets), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.query_assets(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == asset_service.QueryAssetsRequest( - parent='parent_value', - statement='statement_value', - job_reference='job_reference_value', - page_token='page_token_value', + parent="parent_value", + statement="statement_value", + job_reference="job_reference_value", + page_token="page_token_value", ) + def test_query_assets_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -5336,7 +5914,9 @@ def test_query_assets_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client._transport._wrapped_methods[client._transport.query_assets] = mock_rpc request = {} client.query_assets(request) @@ -5350,8 +5930,11 @@ def test_query_assets_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_query_assets_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_query_assets_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -5365,12 +5948,17 @@ async def test_query_assets_async_use_cached_wrapped_rpc(transport: str = "grpc_ wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.query_assets in client._client._transport._wrapped_methods + assert ( + client._client._transport.query_assets + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.query_assets] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.query_assets + ] = mock_rpc request = {} await client.query_assets(request) @@ -5384,8 +5972,11 @@ async def test_query_assets_async_use_cached_wrapped_rpc(transport: str = "grpc_ assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_query_assets_async(transport: str = 'grpc_asyncio', request_type=asset_service.QueryAssetsRequest): +async def test_query_assets_async( + transport: str = "grpc_asyncio", request_type=asset_service.QueryAssetsRequest +): client = AssetServiceAsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -5396,14 +5987,14 @@ async def test_query_assets_async(transport: str = 'grpc_asyncio', request_type= request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.query_assets), - '__call__') as call: + with mock.patch.object(type(client.transport.query_assets), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(asset_service.QueryAssetsResponse( - job_reference='job_reference_value', - done=True, - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.QueryAssetsResponse( + job_reference="job_reference_value", + done=True, + ) + ) response = await client.query_assets(request) # Establish that the underlying gRPC stub method was called. @@ -5414,7 +6005,7 @@ async def test_query_assets_async(transport: str = 'grpc_asyncio', request_type= # Establish that the response is the type that we expect. assert isinstance(response, asset_service.QueryAssetsResponse) - assert response.job_reference == 'job_reference_value' + assert response.job_reference == "job_reference_value" assert response.done is True @@ -5422,6 +6013,7 @@ async def test_query_assets_async(transport: str = 'grpc_asyncio', request_type= async def test_query_assets_async_from_dict(): await test_query_assets_async(request_type=dict) + def test_query_assets_field_headers(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5431,12 +6023,10 @@ def test_query_assets_field_headers(): # a field header. 
Set these to a non-empty value. request = asset_service.QueryAssetsRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.query_assets), - '__call__') as call: + with mock.patch.object(type(client.transport.query_assets), "__call__") as call: call.return_value = asset_service.QueryAssetsResponse() client.query_assets(request) @@ -5448,9 +6038,9 @@ def test_query_assets_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -5463,13 +6053,13 @@ async def test_query_assets_field_headers_async(): # a field header. Set these to a non-empty value. request = asset_service.QueryAssetsRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.query_assets), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.QueryAssetsResponse()) + with mock.patch.object(type(client.transport.query_assets), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.QueryAssetsResponse() + ) await client.query_assets(request) # Establish that the underlying gRPC stub method was called. @@ -5480,16 +6070,19 @@ async def test_query_assets_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] -@pytest.mark.parametrize("request_type", [ - asset_service.CreateSavedQueryRequest, - dict, -]) -def test_create_saved_query(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + asset_service.CreateSavedQueryRequest, + dict, + ], +) +def test_create_saved_query(request_type, transport: str = "grpc"): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5501,14 +6094,14 @@ def test_create_saved_query(request_type, transport: str = 'grpc'): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_saved_query), - '__call__') as call: + type(client.transport.create_saved_query), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = asset_service.SavedQuery( - name='name_value', - description='description_value', - creator='creator_value', - last_updater='last_updater_value', + name="name_value", + description="description_value", + creator="creator_value", + last_updater="last_updater_value", ) response = client.create_saved_query(request) @@ -5520,10 +6113,10 @@ def test_create_saved_query(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. 
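Each RPC test is parametrized over both the proto request class and `dict`, since the generated clients accept the request in either shape. A generic sketch of that parametrization, assuming pytest; `FakeRequest` and `coerce` are illustrative only.

# Editorial sketch only -- request-class-or-dict parametrization.
import pytest


class FakeRequest:
    def __init__(self, name=""):
        self.name = name


def coerce(request_type):
    """Accept either the request class or a dict, as the clients do."""
    request = request_type()
    return request if isinstance(request, FakeRequest) else FakeRequest(**request)


@pytest.mark.parametrize("request_type", [FakeRequest, dict])
def test_either_request_shape_is_accepted(request_type):
    assert isinstance(coerce(request_type), FakeRequest)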
assert isinstance(response, asset_service.SavedQuery) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.creator == 'creator_value' - assert response.last_updater == 'last_updater_value' + assert response.name == "name_value" + assert response.description == "description_value" + assert response.creator == "creator_value" + assert response.last_updater == "last_updater_value" def test_create_saved_query_non_empty_request_with_auto_populated_field(): @@ -5531,30 +6124,33 @@ def test_create_saved_query_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = asset_service.CreateSavedQueryRequest( - parent='parent_value', - saved_query_id='saved_query_id_value', + parent="parent_value", + saved_query_id="saved_query_id_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_saved_query), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + type(client.transport.create_saved_query), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.create_saved_query(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == asset_service.CreateSavedQueryRequest( - parent='parent_value', - saved_query_id='saved_query_id_value', + parent="parent_value", + saved_query_id="saved_query_id_value", ) + def test_create_saved_query_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -5569,12 +6165,18 @@ def test_create_saved_query_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.create_saved_query in client._transport._wrapped_methods + assert ( + client._transport.create_saved_query in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_saved_query] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.create_saved_query + ] = mock_rpc request = {} client.create_saved_query(request) @@ -5587,8 +6189,11 @@ def test_create_saved_query_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_create_saved_query_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_create_saved_query_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -5602,12 +6207,17 @@ async def test_create_saved_query_async_use_cached_wrapped_rpc(transport: str = wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.create_saved_query in client._client._transport._wrapped_methods + assert ( + client._client._transport.create_saved_query + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.create_saved_query] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.create_saved_query + ] = mock_rpc request = {} await client.create_saved_query(request) @@ -5621,8 +6231,11 @@ async def test_create_saved_query_async_use_cached_wrapped_rpc(transport: str = assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_create_saved_query_async(transport: str = 'grpc_asyncio', request_type=asset_service.CreateSavedQueryRequest): +async def test_create_saved_query_async( + transport: str = "grpc_asyncio", request_type=asset_service.CreateSavedQueryRequest +): client = AssetServiceAsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -5634,15 +6247,17 @@ async def test_create_saved_query_async(transport: str = 'grpc_asyncio', request # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_saved_query), - '__call__') as call: + type(client.transport.create_saved_query), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SavedQuery( - name='name_value', - description='description_value', - creator='creator_value', - last_updater='last_updater_value', - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.SavedQuery( + name="name_value", + description="description_value", + creator="creator_value", + last_updater="last_updater_value", + ) + ) response = await client.create_saved_query(request) # Establish that the underlying gRPC stub method was called. @@ -5653,16 +6268,17 @@ async def test_create_saved_query_async(transport: str = 'grpc_asyncio', request # Establish that the response is the type that we expect. 
assert isinstance(response, asset_service.SavedQuery) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.creator == 'creator_value' - assert response.last_updater == 'last_updater_value' + assert response.name == "name_value" + assert response.description == "description_value" + assert response.creator == "creator_value" + assert response.last_updater == "last_updater_value" @pytest.mark.asyncio async def test_create_saved_query_async_from_dict(): await test_create_saved_query_async(request_type=dict) + def test_create_saved_query_field_headers(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5672,12 +6288,12 @@ def test_create_saved_query_field_headers(): # a field header. Set these to a non-empty value. request = asset_service.CreateSavedQueryRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_saved_query), - '__call__') as call: + type(client.transport.create_saved_query), "__call__" + ) as call: call.return_value = asset_service.SavedQuery() client.create_saved_query(request) @@ -5689,9 +6305,9 @@ def test_create_saved_query_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -5704,13 +6320,15 @@ async def test_create_saved_query_field_headers_async(): # a field header. Set these to a non-empty value. request = asset_service.CreateSavedQueryRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_saved_query), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SavedQuery()) + type(client.transport.create_saved_query), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.SavedQuery() + ) await client.create_saved_query(request) # Establish that the underlying gRPC stub method was called. @@ -5721,9 +6339,9 @@ async def test_create_saved_query_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] def test_create_saved_query_flattened(): @@ -5733,16 +6351,16 @@ def test_create_saved_query_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_saved_query), - '__call__') as call: + type(client.transport.create_saved_query), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = asset_service.SavedQuery() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.create_saved_query( - parent='parent_value', - saved_query=asset_service.SavedQuery(name='name_value'), - saved_query_id='saved_query_id_value', + parent="parent_value", + saved_query=asset_service.SavedQuery(name="name_value"), + saved_query_id="saved_query_id_value", ) # Establish that the underlying call was made with the expected @@ -5750,13 +6368,13 @@ def test_create_saved_query_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val arg = args[0].saved_query - mock_val = asset_service.SavedQuery(name='name_value') + mock_val = asset_service.SavedQuery(name="name_value") assert arg == mock_val arg = args[0].saved_query_id - mock_val = 'saved_query_id_value' + mock_val = "saved_query_id_value" assert arg == mock_val @@ -5770,11 +6388,12 @@ def test_create_saved_query_flattened_error(): with pytest.raises(ValueError): client.create_saved_query( asset_service.CreateSavedQueryRequest(), - parent='parent_value', - saved_query=asset_service.SavedQuery(name='name_value'), - saved_query_id='saved_query_id_value', + parent="parent_value", + saved_query=asset_service.SavedQuery(name="name_value"), + saved_query_id="saved_query_id_value", ) + @pytest.mark.asyncio async def test_create_saved_query_flattened_async(): client = AssetServiceAsyncClient( @@ -5783,18 +6402,20 @@ async def test_create_saved_query_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_saved_query), - '__call__') as call: + type(client.transport.create_saved_query), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = asset_service.SavedQuery() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SavedQuery()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.SavedQuery() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
response = await client.create_saved_query( - parent='parent_value', - saved_query=asset_service.SavedQuery(name='name_value'), - saved_query_id='saved_query_id_value', + parent="parent_value", + saved_query=asset_service.SavedQuery(name="name_value"), + saved_query_id="saved_query_id_value", ) # Establish that the underlying call was made with the expected @@ -5802,15 +6423,16 @@ async def test_create_saved_query_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val arg = args[0].saved_query - mock_val = asset_service.SavedQuery(name='name_value') + mock_val = asset_service.SavedQuery(name="name_value") assert arg == mock_val arg = args[0].saved_query_id - mock_val = 'saved_query_id_value' + mock_val = "saved_query_id_value" assert arg == mock_val + @pytest.mark.asyncio async def test_create_saved_query_flattened_error_async(): client = AssetServiceAsyncClient( @@ -5822,17 +6444,20 @@ async def test_create_saved_query_flattened_error_async(): with pytest.raises(ValueError): await client.create_saved_query( asset_service.CreateSavedQueryRequest(), - parent='parent_value', - saved_query=asset_service.SavedQuery(name='name_value'), - saved_query_id='saved_query_id_value', + parent="parent_value", + saved_query=asset_service.SavedQuery(name="name_value"), + saved_query_id="saved_query_id_value", ) -@pytest.mark.parametrize("request_type", [ - asset_service.GetSavedQueryRequest, - dict, -]) -def test_get_saved_query(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + asset_service.GetSavedQueryRequest, + dict, + ], +) +def test_get_saved_query(request_type, transport: str = "grpc"): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5843,15 +6468,13 @@ def test_get_saved_query(request_type, transport: str = 'grpc'): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_saved_query), - '__call__') as call: + with mock.patch.object(type(client.transport.get_saved_query), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = asset_service.SavedQuery( - name='name_value', - description='description_value', - creator='creator_value', - last_updater='last_updater_value', + name="name_value", + description="description_value", + creator="creator_value", + last_updater="last_updater_value", ) response = client.get_saved_query(request) @@ -5863,10 +6486,10 @@ def test_get_saved_query(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, asset_service.SavedQuery) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.creator == 'creator_value' - assert response.last_updater == 'last_updater_value' + assert response.name == "name_value" + assert response.description == "description_value" + assert response.creator == "creator_value" + assert response.last_updater == "last_updater_value" def test_get_saved_query_non_empty_request_with_auto_populated_field(): @@ -5874,28 +6497,29 @@ def test_get_saved_query_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. 
client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = asset_service.GetSavedQueryRequest( - name='name_value', + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_saved_query), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + with mock.patch.object(type(client.transport.get_saved_query), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.get_saved_query(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == asset_service.GetSavedQueryRequest( - name='name_value', + name="name_value", ) + def test_get_saved_query_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -5914,7 +6538,9 @@ def test_get_saved_query_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._transport._wrapped_methods[client._transport.get_saved_query] = mock_rpc request = {} client.get_saved_query(request) @@ -5928,8 +6554,11 @@ def test_get_saved_query_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_get_saved_query_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_get_saved_query_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -5943,12 +6572,17 @@ async def test_get_saved_query_async_use_cached_wrapped_rpc(transport: str = "gr wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.get_saved_query in client._client._transport._wrapped_methods + assert ( + client._client._transport.get_saved_query + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.get_saved_query] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.get_saved_query + ] = mock_rpc request = {} await client.get_saved_query(request) @@ -5962,8 +6596,11 @@ async def test_get_saved_query_async_use_cached_wrapped_rpc(transport: str = "gr assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_get_saved_query_async(transport: str = 'grpc_asyncio', request_type=asset_service.GetSavedQueryRequest): +async def test_get_saved_query_async( + transport: str = "grpc_asyncio", request_type=asset_service.GetSavedQueryRequest +): client = AssetServiceAsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -5974,16 +6611,16 @@ async def 
test_get_saved_query_async(transport: str = 'grpc_asyncio', request_ty request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_saved_query), - '__call__') as call: + with mock.patch.object(type(client.transport.get_saved_query), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SavedQuery( - name='name_value', - description='description_value', - creator='creator_value', - last_updater='last_updater_value', - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.SavedQuery( + name="name_value", + description="description_value", + creator="creator_value", + last_updater="last_updater_value", + ) + ) response = await client.get_saved_query(request) # Establish that the underlying gRPC stub method was called. @@ -5994,16 +6631,17 @@ async def test_get_saved_query_async(transport: str = 'grpc_asyncio', request_ty # Establish that the response is the type that we expect. assert isinstance(response, asset_service.SavedQuery) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.creator == 'creator_value' - assert response.last_updater == 'last_updater_value' + assert response.name == "name_value" + assert response.description == "description_value" + assert response.creator == "creator_value" + assert response.last_updater == "last_updater_value" @pytest.mark.asyncio async def test_get_saved_query_async_from_dict(): await test_get_saved_query_async(request_type=dict) + def test_get_saved_query_field_headers(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6013,12 +6651,10 @@ def test_get_saved_query_field_headers(): # a field header. Set these to a non-empty value. request = asset_service.GetSavedQueryRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_saved_query), - '__call__') as call: + with mock.patch.object(type(client.transport.get_saved_query), "__call__") as call: call.return_value = asset_service.SavedQuery() client.get_saved_query(request) @@ -6030,9 +6666,9 @@ def test_get_saved_query_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -6045,13 +6681,13 @@ async def test_get_saved_query_field_headers_async(): # a field header. Set these to a non-empty value. request = asset_service.GetSavedQueryRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_saved_query), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SavedQuery()) + with mock.patch.object(type(client.transport.get_saved_query), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.SavedQuery() + ) await client.get_saved_query(request) # Establish that the underlying gRPC stub method was called. @@ -6062,9 +6698,9 @@ async def test_get_saved_query_field_headers_async(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] def test_get_saved_query_flattened(): @@ -6073,15 +6709,13 @@ def test_get_saved_query_flattened(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_saved_query), - '__call__') as call: + with mock.patch.object(type(client.transport.get_saved_query), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = asset_service.SavedQuery() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.get_saved_query( - name='name_value', + name="name_value", ) # Establish that the underlying call was made with the expected @@ -6089,7 +6723,7 @@ def test_get_saved_query_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val @@ -6103,9 +6737,10 @@ def test_get_saved_query_flattened_error(): with pytest.raises(ValueError): client.get_saved_query( asset_service.GetSavedQueryRequest(), - name='name_value', + name="name_value", ) + @pytest.mark.asyncio async def test_get_saved_query_flattened_async(): client = AssetServiceAsyncClient( @@ -6113,17 +6748,17 @@ async def test_get_saved_query_flattened_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_saved_query), - '__call__') as call: + with mock.patch.object(type(client.transport.get_saved_query), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = asset_service.SavedQuery() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SavedQuery()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.SavedQuery() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.get_saved_query( - name='name_value', + name="name_value", ) # Establish that the underlying call was made with the expected @@ -6131,9 +6766,10 @@ async def test_get_saved_query_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val + @pytest.mark.asyncio async def test_get_saved_query_flattened_error_async(): client = AssetServiceAsyncClient( @@ -6145,15 +6781,18 @@ async def test_get_saved_query_flattened_error_async(): with pytest.raises(ValueError): await client.get_saved_query( asset_service.GetSavedQueryRequest(), - name='name_value', + name="name_value", ) -@pytest.mark.parametrize("request_type", [ - asset_service.ListSavedQueriesRequest, - dict, -]) -def test_list_saved_queries(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + asset_service.ListSavedQueriesRequest, + dict, + ], +) +def test_list_saved_queries(request_type, transport: str = "grpc"): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -6165,11 +6804,11 @@ def test_list_saved_queries(request_type, transport: str = 'grpc'): # Mock the actual call within the gRPC stub, and fake the request. 
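On the field-header assertions being reformatted here: the expected ("x-goog-request-params", "name=name_value") tuple is what google.api_core builds from the routing parameters, via the same helper the pager tests later in this diff call explicitly. A minimal sketch, assuming google-api-core is installed and behaves as it does today; captured_metadata is only a stand-in for the kw["metadata"] the mock records:

from google.api_core import gapic_v1

# Build the routing header the same way the generated client does.
expected = gapic_v1.routing_header.to_grpc_metadata((("name", "name_value"),))
assert expected == ("x-goog-request-params", "name=name_value")

# The field-header tests then check membership of that exact tuple in the
# metadata captured from the mocked stub call.
captured_metadata = [("some-other-header", "value"), expected]
assert ("x-goog-request-params", "name=name_value") in captured_metadata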
with mock.patch.object( - type(client.transport.list_saved_queries), - '__call__') as call: + type(client.transport.list_saved_queries), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = asset_service.ListSavedQueriesResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", ) response = client.list_saved_queries(request) @@ -6181,7 +6820,7 @@ def test_list_saved_queries(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListSavedQueriesPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" def test_list_saved_queries_non_empty_request_with_auto_populated_field(): @@ -6189,32 +6828,35 @@ def test_list_saved_queries_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = asset_service.ListSavedQueriesRequest( - parent='parent_value', - filter='filter_value', - page_token='page_token_value', + parent="parent_value", + filter="filter_value", + page_token="page_token_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_saved_queries), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + type(client.transport.list_saved_queries), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.list_saved_queries(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == asset_service.ListSavedQueriesRequest( - parent='parent_value', - filter='filter_value', - page_token='page_token_value', + parent="parent_value", + filter="filter_value", + page_token="page_token_value", ) + def test_list_saved_queries_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -6229,12 +6871,18 @@ def test_list_saved_queries_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_saved_queries in client._transport._wrapped_methods + assert ( + client._transport.list_saved_queries in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_saved_queries] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.list_saved_queries + ] = mock_rpc request = {} client.list_saved_queries(request) @@ -6247,8 +6895,11 @@ def test_list_saved_queries_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_list_saved_queries_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_list_saved_queries_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -6262,12 +6913,17 @@ async def test_list_saved_queries_async_use_cached_wrapped_rpc(transport: str = wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.list_saved_queries in client._client._transport._wrapped_methods + assert ( + client._client._transport.list_saved_queries + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_saved_queries] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.list_saved_queries + ] = mock_rpc request = {} await client.list_saved_queries(request) @@ -6281,8 +6937,11 @@ async def test_list_saved_queries_async_use_cached_wrapped_rpc(transport: str = assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_list_saved_queries_async(transport: str = 'grpc_asyncio', request_type=asset_service.ListSavedQueriesRequest): +async def test_list_saved_queries_async( + transport: str = "grpc_asyncio", request_type=asset_service.ListSavedQueriesRequest +): client = AssetServiceAsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -6294,12 +6953,14 @@ async def test_list_saved_queries_async(transport: str = 'grpc_asyncio', request # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_saved_queries), - '__call__') as call: + type(client.transport.list_saved_queries), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(asset_service.ListSavedQueriesResponse( - next_page_token='next_page_token_value', - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.ListSavedQueriesResponse( + next_page_token="next_page_token_value", + ) + ) response = await client.list_saved_queries(request) # Establish that the underlying gRPC stub method was called. @@ -6310,13 +6971,14 @@ async def test_list_saved_queries_async(transport: str = 'grpc_asyncio', request # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListSavedQueriesAsyncPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" @pytest.mark.asyncio async def test_list_saved_queries_async_from_dict(): await test_list_saved_queries_async(request_type=dict) + def test_list_saved_queries_field_headers(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6326,12 +6988,12 @@ def test_list_saved_queries_field_headers(): # a field header. Set these to a non-empty value. 
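The use_cached_wrapped_rpc tests above assert that the transport wraps each RPC once and then reuses the cached wrapper. A minimal sketch of that caching idea with hypothetical names (a plain dict standing in for _wrapped_methods); this is not the transport's actual implementation:

import unittest.mock as mock

def wrap_method(func):
    # Stands in for gapic_v1.method.wrap_method: wraps the raw RPC exactly once.
    def wrapper(request):
        return func(request)
    return wrapper

raw_rpc = mock.Mock(return_value="response")
_wrapped_methods = {raw_rpc: wrap_method(raw_rpc)}   # built once, like _prep_wrapped_messages

def call_rpc(request):
    return _wrapped_methods[raw_rpc](request)        # cache hit on every call, no re-wrapping

assert call_rpc({}) == "response"
assert call_rpc({}) == "response"
assert raw_rpc.call_count == 2                       # the underlying RPC is still invoked each time

# The tests swap the cached entry for a Mock and verify wrap_method is not
# called again, which proves the cached wrapper (not a fresh wrap) is used.
_wrapped_methods[raw_rpc] = mock.Mock(return_value="stubbed")
assert call_rpc({}) == "stubbed"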
request = asset_service.ListSavedQueriesRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_saved_queries), - '__call__') as call: + type(client.transport.list_saved_queries), "__call__" + ) as call: call.return_value = asset_service.ListSavedQueriesResponse() client.list_saved_queries(request) @@ -6343,9 +7005,9 @@ def test_list_saved_queries_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -6358,13 +7020,15 @@ async def test_list_saved_queries_field_headers_async(): # a field header. Set these to a non-empty value. request = asset_service.ListSavedQueriesRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_saved_queries), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.ListSavedQueriesResponse()) + type(client.transport.list_saved_queries), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.ListSavedQueriesResponse() + ) await client.list_saved_queries(request) # Establish that the underlying gRPC stub method was called. @@ -6375,9 +7039,9 @@ async def test_list_saved_queries_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] def test_list_saved_queries_flattened(): @@ -6387,14 +7051,14 @@ def test_list_saved_queries_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_saved_queries), - '__call__') as call: + type(client.transport.list_saved_queries), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = asset_service.ListSavedQueriesResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.list_saved_queries( - parent='parent_value', + parent="parent_value", ) # Establish that the underlying call was made with the expected @@ -6402,7 +7066,7 @@ def test_list_saved_queries_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val @@ -6416,9 +7080,10 @@ def test_list_saved_queries_flattened_error(): with pytest.raises(ValueError): client.list_saved_queries( asset_service.ListSavedQueriesRequest(), - parent='parent_value', + parent="parent_value", ) + @pytest.mark.asyncio async def test_list_saved_queries_flattened_async(): client = AssetServiceAsyncClient( @@ -6427,16 +7092,18 @@ async def test_list_saved_queries_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_saved_queries), - '__call__') as call: + type(client.transport.list_saved_queries), "__call__" + ) as call: # Designate an appropriate return value for the call. 
call.return_value = asset_service.ListSavedQueriesResponse() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.ListSavedQueriesResponse()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.ListSavedQueriesResponse() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.list_saved_queries( - parent='parent_value', + parent="parent_value", ) # Establish that the underlying call was made with the expected @@ -6444,9 +7111,10 @@ async def test_list_saved_queries_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val + @pytest.mark.asyncio async def test_list_saved_queries_flattened_error_async(): client = AssetServiceAsyncClient( @@ -6458,7 +7126,7 @@ async def test_list_saved_queries_flattened_error_async(): with pytest.raises(ValueError): await client.list_saved_queries( asset_service.ListSavedQueriesRequest(), - parent='parent_value', + parent="parent_value", ) @@ -6470,8 +7138,8 @@ def test_list_saved_queries_pager(transport_name: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_saved_queries), - '__call__') as call: + type(client.transport.list_saved_queries), "__call__" + ) as call: # Set the response to a series of pages. call.side_effect = ( asset_service.ListSavedQueriesResponse( @@ -6480,17 +7148,17 @@ def test_list_saved_queries_pager(transport_name: str = "grpc"): asset_service.SavedQuery(), asset_service.SavedQuery(), ], - next_page_token='abc', + next_page_token="abc", ), asset_service.ListSavedQueriesResponse( saved_queries=[], - next_page_token='def', + next_page_token="def", ), asset_service.ListSavedQueriesResponse( saved_queries=[ asset_service.SavedQuery(), ], - next_page_token='ghi', + next_page_token="ghi", ), asset_service.ListSavedQueriesResponse( saved_queries=[ @@ -6505,9 +7173,7 @@ def test_list_saved_queries_pager(transport_name: str = "grpc"): retry = retries.Retry() timeout = 5 expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_saved_queries(request={}, retry=retry, timeout=timeout) @@ -6517,8 +7183,9 @@ def test_list_saved_queries_pager(transport_name: str = "grpc"): results = list(pager) assert len(results) == 6 - assert all(isinstance(i, asset_service.SavedQuery) - for i in results) + assert all(isinstance(i, asset_service.SavedQuery) for i in results) + + def test_list_saved_queries_pages(transport_name: str = "grpc"): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6527,8 +7194,8 @@ def test_list_saved_queries_pages(transport_name: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_saved_queries), - '__call__') as call: + type(client.transport.list_saved_queries), "__call__" + ) as call: # Set the response to a series of pages. 
call.side_effect = ( asset_service.ListSavedQueriesResponse( @@ -6537,17 +7204,17 @@ def test_list_saved_queries_pages(transport_name: str = "grpc"): asset_service.SavedQuery(), asset_service.SavedQuery(), ], - next_page_token='abc', + next_page_token="abc", ), asset_service.ListSavedQueriesResponse( saved_queries=[], - next_page_token='def', + next_page_token="def", ), asset_service.ListSavedQueriesResponse( saved_queries=[ asset_service.SavedQuery(), ], - next_page_token='ghi', + next_page_token="ghi", ), asset_service.ListSavedQueriesResponse( saved_queries=[ @@ -6558,9 +7225,10 @@ def test_list_saved_queries_pages(transport_name: str = "grpc"): RuntimeError, ) pages = list(client.list_saved_queries(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token + @pytest.mark.asyncio async def test_list_saved_queries_async_pager(): client = AssetServiceAsyncClient( @@ -6569,8 +7237,10 @@ async def test_list_saved_queries_async_pager(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_saved_queries), - '__call__', new_callable=mock.AsyncMock) as call: + type(client.transport.list_saved_queries), + "__call__", + new_callable=mock.AsyncMock, + ) as call: # Set the response to a series of pages. call.side_effect = ( asset_service.ListSavedQueriesResponse( @@ -6579,17 +7249,17 @@ async def test_list_saved_queries_async_pager(): asset_service.SavedQuery(), asset_service.SavedQuery(), ], - next_page_token='abc', + next_page_token="abc", ), asset_service.ListSavedQueriesResponse( saved_queries=[], - next_page_token='def', + next_page_token="def", ), asset_service.ListSavedQueriesResponse( saved_queries=[ asset_service.SavedQuery(), ], - next_page_token='ghi', + next_page_token="ghi", ), asset_service.ListSavedQueriesResponse( saved_queries=[ @@ -6599,15 +7269,16 @@ async def test_list_saved_queries_async_pager(): ), RuntimeError, ) - async_pager = await client.list_saved_queries(request={},) - assert async_pager.next_page_token == 'abc' + async_pager = await client.list_saved_queries( + request={}, + ) + assert async_pager.next_page_token == "abc" responses = [] - async for response in async_pager: # pragma: no branch + async for response in async_pager: # pragma: no branch responses.append(response) assert len(responses) == 6 - assert all(isinstance(i, asset_service.SavedQuery) - for i in responses) + assert all(isinstance(i, asset_service.SavedQuery) for i in responses) @pytest.mark.asyncio @@ -6618,8 +7289,10 @@ async def test_list_saved_queries_async_pages(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_saved_queries), - '__call__', new_callable=mock.AsyncMock) as call: + type(client.transport.list_saved_queries), + "__call__", + new_callable=mock.AsyncMock, + ) as call: # Set the response to a series of pages. 
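The pager tests rely on unittest.mock's side_effect-as-iterable behavior: each call to the mock returns the next item in the sequence, and an exception class or instance in the sequence would be raised instead of returned. A small standalone sketch, with plain dicts standing in for ListSavedQueriesResponse:

import unittest.mock as mock

call = mock.Mock()
call.side_effect = (
    {"items": [1, 2, 3], "next_page_token": "abc"},
    {"items": [], "next_page_token": "def"},
    {"items": [4], "next_page_token": "ghi"},
    {"items": [5, 6], "next_page_token": ""},
    RuntimeError,   # guards against the pager requesting a page past the last one
)

tokens = []
while True:
    page = call(request={})
    tokens.append(page["next_page_token"])
    if not page["next_page_token"]:
        break
assert tokens == ["abc", "def", "ghi", ""]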
call.side_effect = ( asset_service.ListSavedQueriesResponse( @@ -6628,17 +7301,17 @@ async def test_list_saved_queries_async_pages(): asset_service.SavedQuery(), asset_service.SavedQuery(), ], - next_page_token='abc', + next_page_token="abc", ), asset_service.ListSavedQueriesResponse( saved_queries=[], - next_page_token='def', + next_page_token="def", ), asset_service.ListSavedQueriesResponse( saved_queries=[ asset_service.SavedQuery(), ], - next_page_token='ghi', + next_page_token="ghi", ), asset_service.ListSavedQueriesResponse( saved_queries=[ @@ -6651,18 +7324,22 @@ async def test_list_saved_queries_async_pages(): pages = [] # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch + async for page_ in ( # pragma: no branch await client.list_saved_queries(request={}) ).pages: pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize("request_type", [ - asset_service.UpdateSavedQueryRequest, - dict, -]) -def test_update_saved_query(request_type, transport: str = 'grpc'): + +@pytest.mark.parametrize( + "request_type", + [ + asset_service.UpdateSavedQueryRequest, + dict, + ], +) +def test_update_saved_query(request_type, transport: str = "grpc"): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -6674,14 +7351,14 @@ def test_update_saved_query(request_type, transport: str = 'grpc'): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_saved_query), - '__call__') as call: + type(client.transport.update_saved_query), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = asset_service.SavedQuery( - name='name_value', - description='description_value', - creator='creator_value', - last_updater='last_updater_value', + name="name_value", + description="description_value", + creator="creator_value", + last_updater="last_updater_value", ) response = client.update_saved_query(request) @@ -6693,10 +7370,10 @@ def test_update_saved_query(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, asset_service.SavedQuery) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.creator == 'creator_value' - assert response.last_updater == 'last_updater_value' + assert response.name == "name_value" + assert response.description == "description_value" + assert response.creator == "creator_value" + assert response.last_updater == "last_updater_value" def test_update_saved_query_non_empty_request_with_auto_populated_field(): @@ -6704,25 +7381,26 @@ def test_update_saved_query_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. 
- request = asset_service.UpdateSavedQueryRequest( - ) + request = asset_service.UpdateSavedQueryRequest() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_saved_query), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + type(client.transport.update_saved_query), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.update_saved_query(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.UpdateSavedQueryRequest( - ) + assert args[0] == asset_service.UpdateSavedQueryRequest() + def test_update_saved_query_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -6738,12 +7416,18 @@ def test_update_saved_query_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.update_saved_query in client._transport._wrapped_methods + assert ( + client._transport.update_saved_query in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_saved_query] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_saved_query + ] = mock_rpc request = {} client.update_saved_query(request) @@ -6756,8 +7440,11 @@ def test_update_saved_query_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_update_saved_query_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_update_saved_query_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -6771,12 +7458,17 @@ async def test_update_saved_query_async_use_cached_wrapped_rpc(transport: str = wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.update_saved_query in client._client._transport._wrapped_methods + assert ( + client._client._transport.update_saved_query + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.update_saved_query] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.update_saved_query + ] = mock_rpc request = {} await client.update_saved_query(request) @@ -6790,8 +7482,11 @@ async def test_update_saved_query_async_use_cached_wrapped_rpc(transport: str = assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_update_saved_query_async(transport: str = 'grpc_asyncio', request_type=asset_service.UpdateSavedQueryRequest): +async def test_update_saved_query_async( + transport: str = "grpc_asyncio", request_type=asset_service.UpdateSavedQueryRequest +): client = AssetServiceAsyncClient( 
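The *_non_empty_request_with_auto_populated_field tests reference AIP-4235. As background only, a conceptual sketch of what auto-population means, using a hypothetical request_id field (the saved-query requests in this file do not carry one, and this is not the generated client's code):

import uuid

class Request:
    def __init__(self, name="", request_id=""):
        self.name = name
        self.request_id = request_id

def auto_populate(request):
    # Fill the (hypothetical) UUID4-annotated field only when the caller left it empty.
    if not request.request_id:
        request.request_id = str(uuid.uuid4())
    return request

req = auto_populate(Request(name="name_value"))
assert req.name == "name_value"              # explicitly set fields pass through untouched
assert uuid.UUID(req.request_id).version == 4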
credentials=async_anonymous_credentials(), transport=transport, @@ -6803,15 +7498,17 @@ async def test_update_saved_query_async(transport: str = 'grpc_asyncio', request # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_saved_query), - '__call__') as call: + type(client.transport.update_saved_query), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SavedQuery( - name='name_value', - description='description_value', - creator='creator_value', - last_updater='last_updater_value', - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.SavedQuery( + name="name_value", + description="description_value", + creator="creator_value", + last_updater="last_updater_value", + ) + ) response = await client.update_saved_query(request) # Establish that the underlying gRPC stub method was called. @@ -6822,16 +7519,17 @@ async def test_update_saved_query_async(transport: str = 'grpc_asyncio', request # Establish that the response is the type that we expect. assert isinstance(response, asset_service.SavedQuery) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.creator == 'creator_value' - assert response.last_updater == 'last_updater_value' + assert response.name == "name_value" + assert response.description == "description_value" + assert response.creator == "creator_value" + assert response.last_updater == "last_updater_value" @pytest.mark.asyncio async def test_update_saved_query_async_from_dict(): await test_update_saved_query_async(request_type=dict) + def test_update_saved_query_field_headers(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6841,12 +7539,12 @@ def test_update_saved_query_field_headers(): # a field header. Set these to a non-empty value. request = asset_service.UpdateSavedQueryRequest() - request.saved_query.name = 'name_value' + request.saved_query.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_saved_query), - '__call__') as call: + type(client.transport.update_saved_query), "__call__" + ) as call: call.return_value = asset_service.SavedQuery() client.update_saved_query(request) @@ -6858,9 +7556,9 @@ def test_update_saved_query_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'saved_query.name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "saved_query.name=name_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -6873,13 +7571,15 @@ async def test_update_saved_query_field_headers_async(): # a field header. Set these to a non-empty value. request = asset_service.UpdateSavedQueryRequest() - request.saved_query.name = 'name_value' + request.saved_query.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_saved_query), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SavedQuery()) + type(client.transport.update_saved_query), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.SavedQuery() + ) await client.update_saved_query(request) # Establish that the underlying gRPC stub method was called. 
@@ -6890,9 +7590,9 @@ async def test_update_saved_query_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'saved_query.name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "saved_query.name=name_value", + ) in kw["metadata"] def test_update_saved_query_flattened(): @@ -6902,15 +7602,15 @@ def test_update_saved_query_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_saved_query), - '__call__') as call: + type(client.transport.update_saved_query), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = asset_service.SavedQuery() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.update_saved_query( - saved_query=asset_service.SavedQuery(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + saved_query=asset_service.SavedQuery(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected @@ -6918,10 +7618,10 @@ def test_update_saved_query_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].saved_query - mock_val = asset_service.SavedQuery(name='name_value') + mock_val = asset_service.SavedQuery(name="name_value") assert arg == mock_val arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val @@ -6935,10 +7635,11 @@ def test_update_saved_query_flattened_error(): with pytest.raises(ValueError): client.update_saved_query( asset_service.UpdateSavedQueryRequest(), - saved_query=asset_service.SavedQuery(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + saved_query=asset_service.SavedQuery(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) + @pytest.mark.asyncio async def test_update_saved_query_flattened_async(): client = AssetServiceAsyncClient( @@ -6947,17 +7648,19 @@ async def test_update_saved_query_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_saved_query), - '__call__') as call: + type(client.transport.update_saved_query), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = asset_service.SavedQuery() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SavedQuery()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.SavedQuery() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
response = await client.update_saved_query( - saved_query=asset_service.SavedQuery(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + saved_query=asset_service.SavedQuery(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected @@ -6965,12 +7668,13 @@ async def test_update_saved_query_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].saved_query - mock_val = asset_service.SavedQuery(name='name_value') + mock_val = asset_service.SavedQuery(name="name_value") assert arg == mock_val arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val + @pytest.mark.asyncio async def test_update_saved_query_flattened_error_async(): client = AssetServiceAsyncClient( @@ -6982,16 +7686,19 @@ async def test_update_saved_query_flattened_error_async(): with pytest.raises(ValueError): await client.update_saved_query( asset_service.UpdateSavedQueryRequest(), - saved_query=asset_service.SavedQuery(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + saved_query=asset_service.SavedQuery(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -@pytest.mark.parametrize("request_type", [ - asset_service.DeleteSavedQueryRequest, - dict, -]) -def test_delete_saved_query(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + asset_service.DeleteSavedQueryRequest, + dict, + ], +) +def test_delete_saved_query(request_type, transport: str = "grpc"): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -7003,8 +7710,8 @@ def test_delete_saved_query(request_type, transport: str = 'grpc'): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_saved_query), - '__call__') as call: + type(client.transport.delete_saved_query), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = None response = client.delete_saved_query(request) @@ -7024,28 +7731,31 @@ def test_delete_saved_query_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = asset_service.DeleteSavedQueryRequest( - name='name_value', + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_saved_query), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + type(client.transport.delete_saved_query), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.delete_saved_query(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == asset_service.DeleteSavedQueryRequest( - name='name_value', + name="name_value", ) + def test_delete_saved_query_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -7060,12 +7770,18 @@ def test_delete_saved_query_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.delete_saved_query in client._transport._wrapped_methods + assert ( + client._transport.delete_saved_query in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_saved_query] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_saved_query + ] = mock_rpc request = {} client.delete_saved_query(request) @@ -7078,8 +7794,11 @@ def test_delete_saved_query_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_delete_saved_query_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_delete_saved_query_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -7093,12 +7812,17 @@ async def test_delete_saved_query_async_use_cached_wrapped_rpc(transport: str = wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.delete_saved_query in client._client._transport._wrapped_methods + assert ( + client._client._transport.delete_saved_query + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.delete_saved_query] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.delete_saved_query + ] = mock_rpc request = {} await client.delete_saved_query(request) @@ -7112,8 +7836,11 @@ async def test_delete_saved_query_async_use_cached_wrapped_rpc(transport: str = assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_delete_saved_query_async(transport: str = 'grpc_asyncio', request_type=asset_service.DeleteSavedQueryRequest): +async def test_delete_saved_query_async( + transport: str = "grpc_asyncio", request_type=asset_service.DeleteSavedQueryRequest +): client = AssetServiceAsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -7125,8 +7852,8 @@ async def test_delete_saved_query_async(transport: str = 'grpc_asyncio', request # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_saved_query), - '__call__') as call: + type(client.transport.delete_saved_query), "__call__" + ) as call: # Designate an appropriate return value for the call. 
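For the grpc_asyncio variants, the cached-RPC tests drop a mock.AsyncMock into _wrapped_methods; awaiting an AsyncMock simply yields its return_value, which is why no real transport is needed. A minimal sketch:

import asyncio
import unittest.mock as mock

async def main():
    mock_rpc = mock.AsyncMock()
    mock_rpc.return_value = {"name": "name_value"}

    # Awaiting the AsyncMock yields return_value, just as awaiting the real
    # wrapped coroutine would yield the RPC response.
    response = await mock_rpc(request={})
    assert response == {"name": "name_value"}
    assert mock_rpc.await_count == 1

asyncio.run(main())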
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) response = await client.delete_saved_query(request) @@ -7145,6 +7872,7 @@ async def test_delete_saved_query_async(transport: str = 'grpc_asyncio', request async def test_delete_saved_query_async_from_dict(): await test_delete_saved_query_async(request_type=dict) + def test_delete_saved_query_field_headers(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -7154,12 +7882,12 @@ def test_delete_saved_query_field_headers(): # a field header. Set these to a non-empty value. request = asset_service.DeleteSavedQueryRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_saved_query), - '__call__') as call: + type(client.transport.delete_saved_query), "__call__" + ) as call: call.return_value = None client.delete_saved_query(request) @@ -7171,9 +7899,9 @@ def test_delete_saved_query_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -7186,12 +7914,12 @@ async def test_delete_saved_query_field_headers_async(): # a field header. Set these to a non-empty value. request = asset_service.DeleteSavedQueryRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_saved_query), - '__call__') as call: + type(client.transport.delete_saved_query), "__call__" + ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) await client.delete_saved_query(request) @@ -7203,9 +7931,9 @@ async def test_delete_saved_query_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] def test_delete_saved_query_flattened(): @@ -7215,14 +7943,14 @@ def test_delete_saved_query_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_saved_query), - '__call__') as call: + type(client.transport.delete_saved_query), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = None # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.delete_saved_query( - name='name_value', + name="name_value", ) # Establish that the underlying call was made with the expected @@ -7230,7 +7958,7 @@ def test_delete_saved_query_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val @@ -7244,9 +7972,10 @@ def test_delete_saved_query_flattened_error(): with pytest.raises(ValueError): client.delete_saved_query( asset_service.DeleteSavedQueryRequest(), - name='name_value', + name="name_value", ) + @pytest.mark.asyncio async def test_delete_saved_query_flattened_async(): client = AssetServiceAsyncClient( @@ -7255,8 +7984,8 @@ async def test_delete_saved_query_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.delete_saved_query), - '__call__') as call: + type(client.transport.delete_saved_query), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = None @@ -7264,7 +7993,7 @@ async def test_delete_saved_query_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.delete_saved_query( - name='name_value', + name="name_value", ) # Establish that the underlying call was made with the expected @@ -7272,9 +8001,10 @@ async def test_delete_saved_query_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val + @pytest.mark.asyncio async def test_delete_saved_query_flattened_error_async(): client = AssetServiceAsyncClient( @@ -7286,15 +8016,18 @@ async def test_delete_saved_query_flattened_error_async(): with pytest.raises(ValueError): await client.delete_saved_query( asset_service.DeleteSavedQueryRequest(), - name='name_value', + name="name_value", ) -@pytest.mark.parametrize("request_type", [ - asset_service.BatchGetEffectiveIamPoliciesRequest, - dict, -]) -def test_batch_get_effective_iam_policies(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + asset_service.BatchGetEffectiveIamPoliciesRequest, + dict, + ], +) +def test_batch_get_effective_iam_policies(request_type, transport: str = "grpc"): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -7306,11 +8039,10 @@ def test_batch_get_effective_iam_policies(request_type, transport: str = 'grpc') # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.batch_get_effective_iam_policies), - '__call__') as call: + type(client.transport.batch_get_effective_iam_policies), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = asset_service.BatchGetEffectiveIamPoliciesResponse( - ) + call.return_value = asset_service.BatchGetEffectiveIamPoliciesResponse() response = client.batch_get_effective_iam_policies(request) # Establish that the underlying gRPC stub method was called. @@ -7328,28 +8060,31 @@ def test_batch_get_effective_iam_policies_non_empty_request_with_auto_populated_ # automatically populated, according to AIP-4235, with non-empty requests. client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = asset_service.BatchGetEffectiveIamPoliciesRequest( - scope='scope_value', + scope="scope_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.batch_get_effective_iam_policies), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + type(client.transport.batch_get_effective_iam_policies), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.batch_get_effective_iam_policies(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == asset_service.BatchGetEffectiveIamPoliciesRequest( - scope='scope_value', + scope="scope_value", ) + def test_batch_get_effective_iam_policies_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -7364,12 +8099,19 @@ def test_batch_get_effective_iam_policies_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.batch_get_effective_iam_policies in client._transport._wrapped_methods + assert ( + client._transport.batch_get_effective_iam_policies + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.batch_get_effective_iam_policies] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.batch_get_effective_iam_policies + ] = mock_rpc request = {} client.batch_get_effective_iam_policies(request) @@ -7382,8 +8124,11 @@ def test_batch_get_effective_iam_policies_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_batch_get_effective_iam_policies_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_batch_get_effective_iam_policies_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -7397,12 +8142,17 @@ async def test_batch_get_effective_iam_policies_async_use_cached_wrapped_rpc(tra wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.batch_get_effective_iam_policies in client._client._transport._wrapped_methods + assert ( + client._client._transport.batch_get_effective_iam_policies + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.batch_get_effective_iam_policies] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.batch_get_effective_iam_policies + ] = mock_rpc request = {} await client.batch_get_effective_iam_policies(request) @@ -7416,8 +8166,12 @@ async def test_batch_get_effective_iam_policies_async_use_cached_wrapped_rpc(tra assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_batch_get_effective_iam_policies_async(transport: str = 'grpc_asyncio', request_type=asset_service.BatchGetEffectiveIamPoliciesRequest): +async def test_batch_get_effective_iam_policies_async( + transport: str = "grpc_asyncio", + request_type=asset_service.BatchGetEffectiveIamPoliciesRequest, +): client = AssetServiceAsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -7429,11 +8183,12 @@ async def test_batch_get_effective_iam_policies_async(transport: str = 'grpc_asy # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.batch_get_effective_iam_policies), - '__call__') as call: + type(client.transport.batch_get_effective_iam_policies), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(asset_service.BatchGetEffectiveIamPoliciesResponse( - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.BatchGetEffectiveIamPoliciesResponse() + ) response = await client.batch_get_effective_iam_policies(request) # Establish that the underlying gRPC stub method was called. @@ -7450,6 +8205,7 @@ async def test_batch_get_effective_iam_policies_async(transport: str = 'grpc_asy async def test_batch_get_effective_iam_policies_async_from_dict(): await test_batch_get_effective_iam_policies_async(request_type=dict) + def test_batch_get_effective_iam_policies_field_headers(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -7459,12 +8215,12 @@ def test_batch_get_effective_iam_policies_field_headers(): # a field header. Set these to a non-empty value. request = asset_service.BatchGetEffectiveIamPoliciesRequest() - request.scope = 'scope_value' + request.scope = "scope_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.batch_get_effective_iam_policies), - '__call__') as call: + type(client.transport.batch_get_effective_iam_policies), "__call__" + ) as call: call.return_value = asset_service.BatchGetEffectiveIamPoliciesResponse() client.batch_get_effective_iam_policies(request) @@ -7476,9 +8232,9 @@ def test_batch_get_effective_iam_policies_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'scope=scope_value', - ) in kw['metadata'] + "x-goog-request-params", + "scope=scope_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -7491,13 +8247,15 @@ async def test_batch_get_effective_iam_policies_field_headers_async(): # a field header. Set these to a non-empty value. request = asset_service.BatchGetEffectiveIamPoliciesRequest() - request.scope = 'scope_value' + request.scope = "scope_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.batch_get_effective_iam_policies), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.BatchGetEffectiveIamPoliciesResponse()) + type(client.transport.batch_get_effective_iam_policies), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.BatchGetEffectiveIamPoliciesResponse() + ) await client.batch_get_effective_iam_policies(request) # Establish that the underlying gRPC stub method was called. @@ -7508,16 +8266,19 @@ async def test_batch_get_effective_iam_policies_field_headers_async(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'scope=scope_value', - ) in kw['metadata'] + "x-goog-request-params", + "scope=scope_value", + ) in kw["metadata"] -@pytest.mark.parametrize("request_type", [ - asset_service.AnalyzeOrgPoliciesRequest, - dict, -]) -def test_analyze_org_policies(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + asset_service.AnalyzeOrgPoliciesRequest, + dict, + ], +) +def test_analyze_org_policies(request_type, transport: str = "grpc"): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -7529,11 +8290,11 @@ def test_analyze_org_policies(request_type, transport: str = 'grpc'): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.analyze_org_policies), - '__call__') as call: + type(client.transport.analyze_org_policies), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = asset_service.AnalyzeOrgPoliciesResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", ) response = client.analyze_org_policies(request) @@ -7545,7 +8306,7 @@ def test_analyze_org_policies(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, pagers.AnalyzeOrgPoliciesPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" def test_analyze_org_policies_non_empty_request_with_auto_populated_field(): @@ -7553,34 +8314,37 @@ def test_analyze_org_policies_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = asset_service.AnalyzeOrgPoliciesRequest( - scope='scope_value', - constraint='constraint_value', - filter='filter_value', - page_token='page_token_value', + scope="scope_value", + constraint="constraint_value", + filter="filter_value", + page_token="page_token_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.analyze_org_policies), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + type(client.transport.analyze_org_policies), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.analyze_org_policies(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == asset_service.AnalyzeOrgPoliciesRequest( - scope='scope_value', - constraint='constraint_value', - filter='filter_value', - page_token='page_token_value', + scope="scope_value", + constraint="constraint_value", + filter="filter_value", + page_token="page_token_value", ) + def test_analyze_org_policies_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -7595,12 +8359,18 @@ def test_analyze_org_policies_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.analyze_org_policies in client._transport._wrapped_methods + assert ( + client._transport.analyze_org_policies in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.analyze_org_policies] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.analyze_org_policies + ] = mock_rpc request = {} client.analyze_org_policies(request) @@ -7613,8 +8383,11 @@ def test_analyze_org_policies_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_analyze_org_policies_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_analyze_org_policies_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -7628,12 +8401,17 @@ async def test_analyze_org_policies_async_use_cached_wrapped_rpc(transport: str wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.analyze_org_policies in client._client._transport._wrapped_methods + assert ( + client._client._transport.analyze_org_policies + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.analyze_org_policies] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.analyze_org_policies + ] = mock_rpc request = {} await client.analyze_org_policies(request) @@ -7647,8 +8425,12 @@ async def test_analyze_org_policies_async_use_cached_wrapped_rpc(transport: str assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_analyze_org_policies_async(transport: str = 'grpc_asyncio', request_type=asset_service.AnalyzeOrgPoliciesRequest): +async def test_analyze_org_policies_async( + transport: str = "grpc_asyncio", + request_type=asset_service.AnalyzeOrgPoliciesRequest, +): client = AssetServiceAsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -7660,12 +8442,14 @@ async def test_analyze_org_policies_async(transport: str = 'grpc_asyncio', reque # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.analyze_org_policies), - '__call__') as call: + type(client.transport.analyze_org_policies), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(asset_service.AnalyzeOrgPoliciesResponse( - next_page_token='next_page_token_value', - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.AnalyzeOrgPoliciesResponse( + next_page_token="next_page_token_value", + ) + ) response = await client.analyze_org_policies(request) # Establish that the underlying gRPC stub method was called. @@ -7676,13 +8460,14 @@ async def test_analyze_org_policies_async(transport: str = 'grpc_asyncio', reque # Establish that the response is the type that we expect. assert isinstance(response, pagers.AnalyzeOrgPoliciesAsyncPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" @pytest.mark.asyncio async def test_analyze_org_policies_async_from_dict(): await test_analyze_org_policies_async(request_type=dict) + def test_analyze_org_policies_field_headers(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -7692,12 +8477,12 @@ def test_analyze_org_policies_field_headers(): # a field header. Set these to a non-empty value. request = asset_service.AnalyzeOrgPoliciesRequest() - request.scope = 'scope_value' + request.scope = "scope_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.analyze_org_policies), - '__call__') as call: + type(client.transport.analyze_org_policies), "__call__" + ) as call: call.return_value = asset_service.AnalyzeOrgPoliciesResponse() client.analyze_org_policies(request) @@ -7709,9 +8494,9 @@ def test_analyze_org_policies_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'scope=scope_value', - ) in kw['metadata'] + "x-goog-request-params", + "scope=scope_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -7724,13 +8509,15 @@ async def test_analyze_org_policies_field_headers_async(): # a field header. Set these to a non-empty value. request = asset_service.AnalyzeOrgPoliciesRequest() - request.scope = 'scope_value' + request.scope = "scope_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.analyze_org_policies), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.AnalyzeOrgPoliciesResponse()) + type(client.transport.analyze_org_policies), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.AnalyzeOrgPoliciesResponse() + ) await client.analyze_org_policies(request) # Establish that the underlying gRPC stub method was called. @@ -7741,9 +8528,9 @@ async def test_analyze_org_policies_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'scope=scope_value', - ) in kw['metadata'] + "x-goog-request-params", + "scope=scope_value", + ) in kw["metadata"] def test_analyze_org_policies_flattened(): @@ -7753,16 +8540,16 @@ def test_analyze_org_policies_flattened(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.analyze_org_policies), - '__call__') as call: + type(client.transport.analyze_org_policies), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = asset_service.AnalyzeOrgPoliciesResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.analyze_org_policies( - scope='scope_value', - constraint='constraint_value', - filter='filter_value', + scope="scope_value", + constraint="constraint_value", + filter="filter_value", ) # Establish that the underlying call was made with the expected @@ -7770,13 +8557,13 @@ def test_analyze_org_policies_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].scope - mock_val = 'scope_value' + mock_val = "scope_value" assert arg == mock_val arg = args[0].constraint - mock_val = 'constraint_value' + mock_val = "constraint_value" assert arg == mock_val arg = args[0].filter - mock_val = 'filter_value' + mock_val = "filter_value" assert arg == mock_val @@ -7790,11 +8577,12 @@ def test_analyze_org_policies_flattened_error(): with pytest.raises(ValueError): client.analyze_org_policies( asset_service.AnalyzeOrgPoliciesRequest(), - scope='scope_value', - constraint='constraint_value', - filter='filter_value', + scope="scope_value", + constraint="constraint_value", + filter="filter_value", ) + @pytest.mark.asyncio async def test_analyze_org_policies_flattened_async(): client = AssetServiceAsyncClient( @@ -7803,18 +8591,20 @@ async def test_analyze_org_policies_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.analyze_org_policies), - '__call__') as call: + type(client.transport.analyze_org_policies), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = asset_service.AnalyzeOrgPoliciesResponse() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.AnalyzeOrgPoliciesResponse()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.AnalyzeOrgPoliciesResponse() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
response = await client.analyze_org_policies( - scope='scope_value', - constraint='constraint_value', - filter='filter_value', + scope="scope_value", + constraint="constraint_value", + filter="filter_value", ) # Establish that the underlying call was made with the expected @@ -7822,15 +8612,16 @@ async def test_analyze_org_policies_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].scope - mock_val = 'scope_value' + mock_val = "scope_value" assert arg == mock_val arg = args[0].constraint - mock_val = 'constraint_value' + mock_val = "constraint_value" assert arg == mock_val arg = args[0].filter - mock_val = 'filter_value' + mock_val = "filter_value" assert arg == mock_val + @pytest.mark.asyncio async def test_analyze_org_policies_flattened_error_async(): client = AssetServiceAsyncClient( @@ -7842,9 +8633,9 @@ async def test_analyze_org_policies_flattened_error_async(): with pytest.raises(ValueError): await client.analyze_org_policies( asset_service.AnalyzeOrgPoliciesRequest(), - scope='scope_value', - constraint='constraint_value', - filter='filter_value', + scope="scope_value", + constraint="constraint_value", + filter="filter_value", ) @@ -7856,8 +8647,8 @@ def test_analyze_org_policies_pager(transport_name: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.analyze_org_policies), - '__call__') as call: + type(client.transport.analyze_org_policies), "__call__" + ) as call: # Set the response to a series of pages. call.side_effect = ( asset_service.AnalyzeOrgPoliciesResponse( @@ -7866,17 +8657,17 @@ def test_analyze_org_policies_pager(transport_name: str = "grpc"): asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), ], - next_page_token='abc', + next_page_token="abc", ), asset_service.AnalyzeOrgPoliciesResponse( org_policy_results=[], - next_page_token='def', + next_page_token="def", ), asset_service.AnalyzeOrgPoliciesResponse( org_policy_results=[ asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), ], - next_page_token='ghi', + next_page_token="ghi", ), asset_service.AnalyzeOrgPoliciesResponse( org_policy_results=[ @@ -7891,9 +8682,7 @@ def test_analyze_org_policies_pager(transport_name: str = "grpc"): retry = retries.Retry() timeout = 5 expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('scope', ''), - )), + gapic_v1.routing_header.to_grpc_metadata((("scope", ""),)), ) pager = client.analyze_org_policies(request={}, retry=retry, timeout=timeout) @@ -7901,10 +8690,14 @@ def test_analyze_org_policies_pager(transport_name: str = "grpc"): assert pager._retry == retry assert pager._timeout == timeout - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult) - for i in results) + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult) + for i in results + ) + + def test_analyze_org_policies_pages(transport_name: str = "grpc"): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -7913,8 +8706,8 @@ def test_analyze_org_policies_pages(transport_name: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.analyze_org_policies), - '__call__') as call: + type(client.transport.analyze_org_policies), "__call__" + ) as call: # Set the response to a series of pages. call.side_effect = ( asset_service.AnalyzeOrgPoliciesResponse( @@ -7923,17 +8716,17 @@ def test_analyze_org_policies_pages(transport_name: str = "grpc"): asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), ], - next_page_token='abc', + next_page_token="abc", ), asset_service.AnalyzeOrgPoliciesResponse( org_policy_results=[], - next_page_token='def', + next_page_token="def", ), asset_service.AnalyzeOrgPoliciesResponse( org_policy_results=[ asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), ], - next_page_token='ghi', + next_page_token="ghi", ), asset_service.AnalyzeOrgPoliciesResponse( org_policy_results=[ @@ -7944,9 +8737,10 @@ def test_analyze_org_policies_pages(transport_name: str = "grpc"): RuntimeError, ) pages = list(client.analyze_org_policies(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token + @pytest.mark.asyncio async def test_analyze_org_policies_async_pager(): client = AssetServiceAsyncClient( @@ -7955,8 +8749,10 @@ async def test_analyze_org_policies_async_pager(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.analyze_org_policies), - '__call__', new_callable=mock.AsyncMock) as call: + type(client.transport.analyze_org_policies), + "__call__", + new_callable=mock.AsyncMock, + ) as call: # Set the response to a series of pages. call.side_effect = ( asset_service.AnalyzeOrgPoliciesResponse( @@ -7965,17 +8761,17 @@ async def test_analyze_org_policies_async_pager(): asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), ], - next_page_token='abc', + next_page_token="abc", ), asset_service.AnalyzeOrgPoliciesResponse( org_policy_results=[], - next_page_token='def', + next_page_token="def", ), asset_service.AnalyzeOrgPoliciesResponse( org_policy_results=[ asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), ], - next_page_token='ghi', + next_page_token="ghi", ), asset_service.AnalyzeOrgPoliciesResponse( org_policy_results=[ @@ -7985,15 +8781,19 @@ async def test_analyze_org_policies_async_pager(): ), RuntimeError, ) - async_pager = await client.analyze_org_policies(request={},) - assert async_pager.next_page_token == 'abc' + async_pager = await client.analyze_org_policies( + request={}, + ) + assert async_pager.next_page_token == "abc" responses = [] - async for response in async_pager: # pragma: no branch + async for response in async_pager: # pragma: no branch responses.append(response) assert len(responses) == 6 - assert all(isinstance(i, asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult) - for i in responses) + assert all( + isinstance(i, asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult) + for i in responses + ) @pytest.mark.asyncio @@ -8004,8 +8804,10 @@ async def test_analyze_org_policies_async_pages(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.analyze_org_policies), - '__call__', new_callable=mock.AsyncMock) as call: + type(client.transport.analyze_org_policies), + "__call__", + new_callable=mock.AsyncMock, + ) as call: # Set the response to a series of pages. call.side_effect = ( asset_service.AnalyzeOrgPoliciesResponse( @@ -8014,17 +8816,17 @@ async def test_analyze_org_policies_async_pages(): asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), ], - next_page_token='abc', + next_page_token="abc", ), asset_service.AnalyzeOrgPoliciesResponse( org_policy_results=[], - next_page_token='def', + next_page_token="def", ), asset_service.AnalyzeOrgPoliciesResponse( org_policy_results=[ asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), ], - next_page_token='ghi', + next_page_token="ghi", ), asset_service.AnalyzeOrgPoliciesResponse( org_policy_results=[ @@ -8037,18 +8839,22 @@ async def test_analyze_org_policies_async_pages(): pages = [] # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch + async for page_ in ( # pragma: no branch await client.analyze_org_policies(request={}) ).pages: pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize("request_type", [ - asset_service.AnalyzeOrgPolicyGovernedContainersRequest, - dict, -]) -def test_analyze_org_policy_governed_containers(request_type, transport: str = 'grpc'): + +@pytest.mark.parametrize( + "request_type", + [ + asset_service.AnalyzeOrgPolicyGovernedContainersRequest, + dict, + ], +) +def test_analyze_org_policy_governed_containers(request_type, transport: str = "grpc"): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -8060,11 +8866,11 @@ def test_analyze_org_policy_governed_containers(request_type, transport: str = ' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.analyze_org_policy_governed_containers), - '__call__') as call: + type(client.transport.analyze_org_policy_governed_containers), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = asset_service.AnalyzeOrgPolicyGovernedContainersResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", ) response = client.analyze_org_policy_governed_containers(request) @@ -8076,7 +8882,7 @@ def test_analyze_org_policy_governed_containers(request_type, transport: str = ' # Establish that the response is the type that we expect. assert isinstance(response, pagers.AnalyzeOrgPolicyGovernedContainersPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" def test_analyze_org_policy_governed_containers_non_empty_request_with_auto_populated_field(): @@ -8084,34 +8890,37 @@ def test_analyze_org_policy_governed_containers_non_empty_request_with_auto_popu # automatically populated, according to AIP-4235, with non-empty requests. 
client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = asset_service.AnalyzeOrgPolicyGovernedContainersRequest( - scope='scope_value', - constraint='constraint_value', - filter='filter_value', - page_token='page_token_value', + scope="scope_value", + constraint="constraint_value", + filter="filter_value", + page_token="page_token_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.analyze_org_policy_governed_containers), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + type(client.transport.analyze_org_policy_governed_containers), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.analyze_org_policy_governed_containers(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == asset_service.AnalyzeOrgPolicyGovernedContainersRequest( - scope='scope_value', - constraint='constraint_value', - filter='filter_value', - page_token='page_token_value', + scope="scope_value", + constraint="constraint_value", + filter="filter_value", + page_token="page_token_value", ) + def test_analyze_org_policy_governed_containers_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -8126,12 +8935,19 @@ def test_analyze_org_policy_governed_containers_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.analyze_org_policy_governed_containers in client._transport._wrapped_methods + assert ( + client._transport.analyze_org_policy_governed_containers + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.analyze_org_policy_governed_containers] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.analyze_org_policy_governed_containers + ] = mock_rpc request = {} client.analyze_org_policy_governed_containers(request) @@ -8144,8 +8960,11 @@ def test_analyze_org_policy_governed_containers_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_analyze_org_policy_governed_containers_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_analyze_org_policy_governed_containers_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -8159,12 +8978,17 @@ async def test_analyze_org_policy_governed_containers_async_use_cached_wrapped_r wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.analyze_org_policy_governed_containers in client._client._transport._wrapped_methods + assert ( + client._client._transport.analyze_org_policy_governed_containers + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.analyze_org_policy_governed_containers] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.analyze_org_policy_governed_containers + ] = mock_rpc request = {} await client.analyze_org_policy_governed_containers(request) @@ -8178,8 +9002,12 @@ async def test_analyze_org_policy_governed_containers_async_use_cached_wrapped_r assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_analyze_org_policy_governed_containers_async(transport: str = 'grpc_asyncio', request_type=asset_service.AnalyzeOrgPolicyGovernedContainersRequest): +async def test_analyze_org_policy_governed_containers_async( + transport: str = "grpc_asyncio", + request_type=asset_service.AnalyzeOrgPolicyGovernedContainersRequest, +): client = AssetServiceAsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -8191,12 +9019,14 @@ async def test_analyze_org_policy_governed_containers_async(transport: str = 'gr # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.analyze_org_policy_governed_containers), - '__call__') as call: + type(client.transport.analyze_org_policy_governed_containers), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(asset_service.AnalyzeOrgPolicyGovernedContainersResponse( - next_page_token='next_page_token_value', - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.AnalyzeOrgPolicyGovernedContainersResponse( + next_page_token="next_page_token_value", + ) + ) response = await client.analyze_org_policy_governed_containers(request) # Establish that the underlying gRPC stub method was called. @@ -8207,13 +9037,14 @@ async def test_analyze_org_policy_governed_containers_async(transport: str = 'gr # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.AnalyzeOrgPolicyGovernedContainersAsyncPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" @pytest.mark.asyncio async def test_analyze_org_policy_governed_containers_async_from_dict(): await test_analyze_org_policy_governed_containers_async(request_type=dict) + def test_analyze_org_policy_governed_containers_field_headers(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -8223,12 +9054,12 @@ def test_analyze_org_policy_governed_containers_field_headers(): # a field header. Set these to a non-empty value. request = asset_service.AnalyzeOrgPolicyGovernedContainersRequest() - request.scope = 'scope_value' + request.scope = "scope_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.analyze_org_policy_governed_containers), - '__call__') as call: + type(client.transport.analyze_org_policy_governed_containers), "__call__" + ) as call: call.return_value = asset_service.AnalyzeOrgPolicyGovernedContainersResponse() client.analyze_org_policy_governed_containers(request) @@ -8240,9 +9071,9 @@ def test_analyze_org_policy_governed_containers_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'scope=scope_value', - ) in kw['metadata'] + "x-goog-request-params", + "scope=scope_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -8255,13 +9086,15 @@ async def test_analyze_org_policy_governed_containers_field_headers_async(): # a field header. Set these to a non-empty value. request = asset_service.AnalyzeOrgPolicyGovernedContainersRequest() - request.scope = 'scope_value' + request.scope = "scope_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.analyze_org_policy_governed_containers), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.AnalyzeOrgPolicyGovernedContainersResponse()) + type(client.transport.analyze_org_policy_governed_containers), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.AnalyzeOrgPolicyGovernedContainersResponse() + ) await client.analyze_org_policy_governed_containers(request) # Establish that the underlying gRPC stub method was called. @@ -8272,9 +9105,9 @@ async def test_analyze_org_policy_governed_containers_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'scope=scope_value', - ) in kw['metadata'] + "x-goog-request-params", + "scope=scope_value", + ) in kw["metadata"] def test_analyze_org_policy_governed_containers_flattened(): @@ -8284,16 +9117,16 @@ def test_analyze_org_policy_governed_containers_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.analyze_org_policy_governed_containers), - '__call__') as call: + type(client.transport.analyze_org_policy_governed_containers), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = asset_service.AnalyzeOrgPolicyGovernedContainersResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.analyze_org_policy_governed_containers( - scope='scope_value', - constraint='constraint_value', - filter='filter_value', + scope="scope_value", + constraint="constraint_value", + filter="filter_value", ) # Establish that the underlying call was made with the expected @@ -8301,13 +9134,13 @@ def test_analyze_org_policy_governed_containers_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].scope - mock_val = 'scope_value' + mock_val = "scope_value" assert arg == mock_val arg = args[0].constraint - mock_val = 'constraint_value' + mock_val = "constraint_value" assert arg == mock_val arg = args[0].filter - mock_val = 'filter_value' + mock_val = "filter_value" assert arg == mock_val @@ -8321,11 +9154,12 @@ def test_analyze_org_policy_governed_containers_flattened_error(): with pytest.raises(ValueError): client.analyze_org_policy_governed_containers( asset_service.AnalyzeOrgPolicyGovernedContainersRequest(), - scope='scope_value', - constraint='constraint_value', - filter='filter_value', + scope="scope_value", + constraint="constraint_value", + filter="filter_value", ) + @pytest.mark.asyncio async def test_analyze_org_policy_governed_containers_flattened_async(): client = AssetServiceAsyncClient( @@ -8334,18 +9168,20 @@ async def test_analyze_org_policy_governed_containers_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.analyze_org_policy_governed_containers), - '__call__') as call: + type(client.transport.analyze_org_policy_governed_containers), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = asset_service.AnalyzeOrgPolicyGovernedContainersResponse() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.AnalyzeOrgPolicyGovernedContainersResponse()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.AnalyzeOrgPolicyGovernedContainersResponse() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
response = await client.analyze_org_policy_governed_containers( - scope='scope_value', - constraint='constraint_value', - filter='filter_value', + scope="scope_value", + constraint="constraint_value", + filter="filter_value", ) # Establish that the underlying call was made with the expected @@ -8353,15 +9189,16 @@ async def test_analyze_org_policy_governed_containers_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].scope - mock_val = 'scope_value' + mock_val = "scope_value" assert arg == mock_val arg = args[0].constraint - mock_val = 'constraint_value' + mock_val = "constraint_value" assert arg == mock_val arg = args[0].filter - mock_val = 'filter_value' + mock_val = "filter_value" assert arg == mock_val + @pytest.mark.asyncio async def test_analyze_org_policy_governed_containers_flattened_error_async(): client = AssetServiceAsyncClient( @@ -8373,9 +9210,9 @@ async def test_analyze_org_policy_governed_containers_flattened_error_async(): with pytest.raises(ValueError): await client.analyze_org_policy_governed_containers( asset_service.AnalyzeOrgPolicyGovernedContainersRequest(), - scope='scope_value', - constraint='constraint_value', - filter='filter_value', + scope="scope_value", + constraint="constraint_value", + filter="filter_value", ) @@ -8387,8 +9224,8 @@ def test_analyze_org_policy_governed_containers_pager(transport_name: str = "grp # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.analyze_org_policy_governed_containers), - '__call__') as call: + type(client.transport.analyze_org_policy_governed_containers), "__call__" + ) as call: # Set the response to a series of pages. call.side_effect = ( asset_service.AnalyzeOrgPolicyGovernedContainersResponse( @@ -8397,17 +9234,17 @@ def test_analyze_org_policy_governed_containers_pager(transport_name: str = "grp asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), ], - next_page_token='abc', + next_page_token="abc", ), asset_service.AnalyzeOrgPolicyGovernedContainersResponse( governed_containers=[], - next_page_token='def', + next_page_token="def", ), asset_service.AnalyzeOrgPolicyGovernedContainersResponse( governed_containers=[ asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), ], - next_page_token='ghi', + next_page_token="ghi", ), asset_service.AnalyzeOrgPolicyGovernedContainersResponse( governed_containers=[ @@ -8422,11 +9259,11 @@ def test_analyze_org_policy_governed_containers_pager(transport_name: str = "grp retry = retries.Retry() timeout = 5 expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('scope', ''), - )), + gapic_v1.routing_header.to_grpc_metadata((("scope", ""),)), + ) + pager = client.analyze_org_policy_governed_containers( + request={}, retry=retry, timeout=timeout ) - pager = client.analyze_org_policy_governed_containers(request={}, retry=retry, timeout=timeout) assert pager._metadata == expected_metadata assert pager._retry == retry @@ -8434,8 +9271,15 @@ def test_analyze_org_policy_governed_containers_pager(transport_name: str = "grp results = list(pager) assert len(results) == 6 - assert all(isinstance(i, asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer) - for i in results) + assert all( + isinstance( + i, + asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer, + ) + for i in results + ) + + 
def test_analyze_org_policy_governed_containers_pages(transport_name: str = "grpc"): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -8444,8 +9288,8 @@ def test_analyze_org_policy_governed_containers_pages(transport_name: str = "grp # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.analyze_org_policy_governed_containers), - '__call__') as call: + type(client.transport.analyze_org_policy_governed_containers), "__call__" + ) as call: # Set the response to a series of pages. call.side_effect = ( asset_service.AnalyzeOrgPolicyGovernedContainersResponse( @@ -8454,17 +9298,17 @@ def test_analyze_org_policy_governed_containers_pages(transport_name: str = "grp asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), ], - next_page_token='abc', + next_page_token="abc", ), asset_service.AnalyzeOrgPolicyGovernedContainersResponse( governed_containers=[], - next_page_token='def', + next_page_token="def", ), asset_service.AnalyzeOrgPolicyGovernedContainersResponse( governed_containers=[ asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), ], - next_page_token='ghi', + next_page_token="ghi", ), asset_service.AnalyzeOrgPolicyGovernedContainersResponse( governed_containers=[ @@ -8475,9 +9319,10 @@ def test_analyze_org_policy_governed_containers_pages(transport_name: str = "grp RuntimeError, ) pages = list(client.analyze_org_policy_governed_containers(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token + @pytest.mark.asyncio async def test_analyze_org_policy_governed_containers_async_pager(): client = AssetServiceAsyncClient( @@ -8486,8 +9331,10 @@ async def test_analyze_org_policy_governed_containers_async_pager(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.analyze_org_policy_governed_containers), - '__call__', new_callable=mock.AsyncMock) as call: + type(client.transport.analyze_org_policy_governed_containers), + "__call__", + new_callable=mock.AsyncMock, + ) as call: # Set the response to a series of pages. 
call.side_effect = ( asset_service.AnalyzeOrgPolicyGovernedContainersResponse( @@ -8496,17 +9343,17 @@ async def test_analyze_org_policy_governed_containers_async_pager(): asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), ], - next_page_token='abc', + next_page_token="abc", ), asset_service.AnalyzeOrgPolicyGovernedContainersResponse( governed_containers=[], - next_page_token='def', + next_page_token="def", ), asset_service.AnalyzeOrgPolicyGovernedContainersResponse( governed_containers=[ asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), ], - next_page_token='ghi', + next_page_token="ghi", ), asset_service.AnalyzeOrgPolicyGovernedContainersResponse( governed_containers=[ @@ -8516,15 +9363,22 @@ async def test_analyze_org_policy_governed_containers_async_pager(): ), RuntimeError, ) - async_pager = await client.analyze_org_policy_governed_containers(request={},) - assert async_pager.next_page_token == 'abc' + async_pager = await client.analyze_org_policy_governed_containers( + request={}, + ) + assert async_pager.next_page_token == "abc" responses = [] - async for response in async_pager: # pragma: no branch + async for response in async_pager: # pragma: no branch responses.append(response) assert len(responses) == 6 - assert all(isinstance(i, asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer) - for i in responses) + assert all( + isinstance( + i, + asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer, + ) + for i in responses + ) @pytest.mark.asyncio @@ -8535,8 +9389,10 @@ async def test_analyze_org_policy_governed_containers_async_pages(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.analyze_org_policy_governed_containers), - '__call__', new_callable=mock.AsyncMock) as call: + type(client.transport.analyze_org_policy_governed_containers), + "__call__", + new_callable=mock.AsyncMock, + ) as call: # Set the response to a series of pages. 
call.side_effect = ( asset_service.AnalyzeOrgPolicyGovernedContainersResponse( @@ -8545,17 +9401,17 @@ async def test_analyze_org_policy_governed_containers_async_pages(): asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), ], - next_page_token='abc', + next_page_token="abc", ), asset_service.AnalyzeOrgPolicyGovernedContainersResponse( governed_containers=[], - next_page_token='def', + next_page_token="def", ), asset_service.AnalyzeOrgPolicyGovernedContainersResponse( governed_containers=[ asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), ], - next_page_token='ghi', + next_page_token="ghi", ), asset_service.AnalyzeOrgPolicyGovernedContainersResponse( governed_containers=[ @@ -8568,18 +9424,22 @@ async def test_analyze_org_policy_governed_containers_async_pages(): pages = [] # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch + async for page_ in ( # pragma: no branch await client.analyze_org_policy_governed_containers(request={}) ).pages: pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize("request_type", [ - asset_service.AnalyzeOrgPolicyGovernedAssetsRequest, - dict, -]) -def test_analyze_org_policy_governed_assets(request_type, transport: str = 'grpc'): + +@pytest.mark.parametrize( + "request_type", + [ + asset_service.AnalyzeOrgPolicyGovernedAssetsRequest, + dict, + ], +) +def test_analyze_org_policy_governed_assets(request_type, transport: str = "grpc"): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -8591,11 +9451,11 @@ def test_analyze_org_policy_governed_assets(request_type, transport: str = 'grpc # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.analyze_org_policy_governed_assets), - '__call__') as call: + type(client.transport.analyze_org_policy_governed_assets), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", ) response = client.analyze_org_policy_governed_assets(request) @@ -8607,7 +9467,7 @@ def test_analyze_org_policy_governed_assets(request_type, transport: str = 'grpc # Establish that the response is the type that we expect. assert isinstance(response, pagers.AnalyzeOrgPolicyGovernedAssetsPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" def test_analyze_org_policy_governed_assets_non_empty_request_with_auto_populated_field(): @@ -8615,34 +9475,37 @@ def test_analyze_org_policy_governed_assets_non_empty_request_with_auto_populate # automatically populated, according to AIP-4235, with non-empty requests. client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. 
request = asset_service.AnalyzeOrgPolicyGovernedAssetsRequest( - scope='scope_value', - constraint='constraint_value', - filter='filter_value', - page_token='page_token_value', + scope="scope_value", + constraint="constraint_value", + filter="filter_value", + page_token="page_token_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.analyze_org_policy_governed_assets), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + type(client.transport.analyze_org_policy_governed_assets), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.analyze_org_policy_governed_assets(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == asset_service.AnalyzeOrgPolicyGovernedAssetsRequest( - scope='scope_value', - constraint='constraint_value', - filter='filter_value', - page_token='page_token_value', + scope="scope_value", + constraint="constraint_value", + filter="filter_value", + page_token="page_token_value", ) + def test_analyze_org_policy_governed_assets_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -8657,12 +9520,19 @@ def test_analyze_org_policy_governed_assets_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.analyze_org_policy_governed_assets in client._transport._wrapped_methods + assert ( + client._transport.analyze_org_policy_governed_assets + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.analyze_org_policy_governed_assets] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.analyze_org_policy_governed_assets + ] = mock_rpc request = {} client.analyze_org_policy_governed_assets(request) @@ -8675,8 +9545,11 @@ def test_analyze_org_policy_governed_assets_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_analyze_org_policy_governed_assets_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_analyze_org_policy_governed_assets_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -8690,12 +9563,17 @@ async def test_analyze_org_policy_governed_assets_async_use_cached_wrapped_rpc(t wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.analyze_org_policy_governed_assets in client._client._transport._wrapped_methods + assert ( + client._client._transport.analyze_org_policy_governed_assets + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.analyze_org_policy_governed_assets] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.analyze_org_policy_governed_assets + ] = mock_rpc request = {} await client.analyze_org_policy_governed_assets(request) @@ -8709,8 +9587,12 @@ async def test_analyze_org_policy_governed_assets_async_use_cached_wrapped_rpc(t assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_analyze_org_policy_governed_assets_async(transport: str = 'grpc_asyncio', request_type=asset_service.AnalyzeOrgPolicyGovernedAssetsRequest): +async def test_analyze_org_policy_governed_assets_async( + transport: str = "grpc_asyncio", + request_type=asset_service.AnalyzeOrgPolicyGovernedAssetsRequest, +): client = AssetServiceAsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -8722,12 +9604,14 @@ async def test_analyze_org_policy_governed_assets_async(transport: str = 'grpc_a # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.analyze_org_policy_governed_assets), - '__call__') as call: + type(client.transport.analyze_org_policy_governed_assets), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( - next_page_token='next_page_token_value', - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( + next_page_token="next_page_token_value", + ) + ) response = await client.analyze_org_policy_governed_assets(request) # Establish that the underlying gRPC stub method was called. @@ -8738,13 +9622,14 @@ async def test_analyze_org_policy_governed_assets_async(transport: str = 'grpc_a # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.AnalyzeOrgPolicyGovernedAssetsAsyncPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" @pytest.mark.asyncio async def test_analyze_org_policy_governed_assets_async_from_dict(): await test_analyze_org_policy_governed_assets_async(request_type=dict) + def test_analyze_org_policy_governed_assets_field_headers(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -8754,12 +9639,12 @@ def test_analyze_org_policy_governed_assets_field_headers(): # a field header. Set these to a non-empty value. request = asset_service.AnalyzeOrgPolicyGovernedAssetsRequest() - request.scope = 'scope_value' + request.scope = "scope_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.analyze_org_policy_governed_assets), - '__call__') as call: + type(client.transport.analyze_org_policy_governed_assets), "__call__" + ) as call: call.return_value = asset_service.AnalyzeOrgPolicyGovernedAssetsResponse() client.analyze_org_policy_governed_assets(request) @@ -8771,9 +9656,9 @@ def test_analyze_org_policy_governed_assets_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'scope=scope_value', - ) in kw['metadata'] + "x-goog-request-params", + "scope=scope_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -8786,13 +9671,15 @@ async def test_analyze_org_policy_governed_assets_field_headers_async(): # a field header. Set these to a non-empty value. request = asset_service.AnalyzeOrgPolicyGovernedAssetsRequest() - request.scope = 'scope_value' + request.scope = "scope_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.analyze_org_policy_governed_assets), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.AnalyzeOrgPolicyGovernedAssetsResponse()) + type(client.transport.analyze_org_policy_governed_assets), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse() + ) await client.analyze_org_policy_governed_assets(request) # Establish that the underlying gRPC stub method was called. @@ -8803,9 +9690,9 @@ async def test_analyze_org_policy_governed_assets_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'scope=scope_value', - ) in kw['metadata'] + "x-goog-request-params", + "scope=scope_value", + ) in kw["metadata"] def test_analyze_org_policy_governed_assets_flattened(): @@ -8815,16 +9702,16 @@ def test_analyze_org_policy_governed_assets_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.analyze_org_policy_governed_assets), - '__call__') as call: + type(client.transport.analyze_org_policy_governed_assets), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = asset_service.AnalyzeOrgPolicyGovernedAssetsResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.analyze_org_policy_governed_assets( - scope='scope_value', - constraint='constraint_value', - filter='filter_value', + scope="scope_value", + constraint="constraint_value", + filter="filter_value", ) # Establish that the underlying call was made with the expected @@ -8832,13 +9719,13 @@ def test_analyze_org_policy_governed_assets_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].scope - mock_val = 'scope_value' + mock_val = "scope_value" assert arg == mock_val arg = args[0].constraint - mock_val = 'constraint_value' + mock_val = "constraint_value" assert arg == mock_val arg = args[0].filter - mock_val = 'filter_value' + mock_val = "filter_value" assert arg == mock_val @@ -8852,11 +9739,12 @@ def test_analyze_org_policy_governed_assets_flattened_error(): with pytest.raises(ValueError): client.analyze_org_policy_governed_assets( asset_service.AnalyzeOrgPolicyGovernedAssetsRequest(), - scope='scope_value', - constraint='constraint_value', - filter='filter_value', + scope="scope_value", + constraint="constraint_value", + filter="filter_value", ) + @pytest.mark.asyncio async def test_analyze_org_policy_governed_assets_flattened_async(): client = AssetServiceAsyncClient( @@ -8865,18 +9753,20 @@ async def test_analyze_org_policy_governed_assets_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.analyze_org_policy_governed_assets), - '__call__') as call: + type(client.transport.analyze_org_policy_governed_assets), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = asset_service.AnalyzeOrgPolicyGovernedAssetsResponse() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.AnalyzeOrgPolicyGovernedAssetsResponse()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.analyze_org_policy_governed_assets( - scope='scope_value', - constraint='constraint_value', - filter='filter_value', + scope="scope_value", + constraint="constraint_value", + filter="filter_value", ) # Establish that the underlying call was made with the expected @@ -8884,15 +9774,16 @@ async def test_analyze_org_policy_governed_assets_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].scope - mock_val = 'scope_value' + mock_val = "scope_value" assert arg == mock_val arg = args[0].constraint - mock_val = 'constraint_value' + mock_val = "constraint_value" assert arg == mock_val arg = args[0].filter - mock_val = 'filter_value' + mock_val = "filter_value" assert arg == mock_val + @pytest.mark.asyncio async def test_analyze_org_policy_governed_assets_flattened_error_async(): client = AssetServiceAsyncClient( @@ -8904,9 +9795,9 @@ async def test_analyze_org_policy_governed_assets_flattened_error_async(): with pytest.raises(ValueError): await client.analyze_org_policy_governed_assets( asset_service.AnalyzeOrgPolicyGovernedAssetsRequest(), - scope='scope_value', - constraint='constraint_value', - filter='filter_value', + scope="scope_value", + constraint="constraint_value", + filter="filter_value", ) @@ -8918,8 +9809,8 @@ def test_analyze_org_policy_governed_assets_pager(transport_name: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.analyze_org_policy_governed_assets), - '__call__') as call: + type(client.transport.analyze_org_policy_governed_assets), "__call__" + ) as call: # Set the response to a series of pages. call.side_effect = ( asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( @@ -8928,17 +9819,17 @@ def test_analyze_org_policy_governed_assets_pager(transport_name: str = "grpc"): asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), ], - next_page_token='abc', + next_page_token="abc", ), asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( governed_assets=[], - next_page_token='def', + next_page_token="def", ), asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( governed_assets=[ asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), ], - next_page_token='ghi', + next_page_token="ghi", ), asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( governed_assets=[ @@ -8953,11 +9844,11 @@ def test_analyze_org_policy_governed_assets_pager(transport_name: str = "grpc"): retry = retries.Retry() timeout = 5 expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('scope', ''), - )), + gapic_v1.routing_header.to_grpc_metadata((("scope", ""),)), + ) + pager = client.analyze_org_policy_governed_assets( + request={}, retry=retry, timeout=timeout ) - pager = client.analyze_org_policy_governed_assets(request={}, retry=retry, timeout=timeout) assert pager._metadata == expected_metadata assert pager._retry == retry @@ -8965,8 +9856,14 @@ def test_analyze_org_policy_governed_assets_pager(transport_name: str = "grpc"): results = list(pager) assert len(results) == 6 - assert all(isinstance(i, asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset) - for i in results) + assert all( + isinstance( + i, asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset + ) + for i in results + ) + + def test_analyze_org_policy_governed_assets_pages(transport_name: str = "grpc"): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -8975,8 +9872,8 @@ def test_analyze_org_policy_governed_assets_pages(transport_name: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.analyze_org_policy_governed_assets), - '__call__') as call: + type(client.transport.analyze_org_policy_governed_assets), "__call__" + ) as call: # Set the response to a series of pages. 
call.side_effect = ( asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( @@ -8985,17 +9882,17 @@ def test_analyze_org_policy_governed_assets_pages(transport_name: str = "grpc"): asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), ], - next_page_token='abc', + next_page_token="abc", ), asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( governed_assets=[], - next_page_token='def', + next_page_token="def", ), asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( governed_assets=[ asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), ], - next_page_token='ghi', + next_page_token="ghi", ), asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( governed_assets=[ @@ -9006,9 +9903,10 @@ def test_analyze_org_policy_governed_assets_pages(transport_name: str = "grpc"): RuntimeError, ) pages = list(client.analyze_org_policy_governed_assets(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token + @pytest.mark.asyncio async def test_analyze_org_policy_governed_assets_async_pager(): client = AssetServiceAsyncClient( @@ -9017,8 +9915,10 @@ async def test_analyze_org_policy_governed_assets_async_pager(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.analyze_org_policy_governed_assets), - '__call__', new_callable=mock.AsyncMock) as call: + type(client.transport.analyze_org_policy_governed_assets), + "__call__", + new_callable=mock.AsyncMock, + ) as call: # Set the response to a series of pages. call.side_effect = ( asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( @@ -9027,17 +9927,17 @@ async def test_analyze_org_policy_governed_assets_async_pager(): asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), ], - next_page_token='abc', + next_page_token="abc", ), asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( governed_assets=[], - next_page_token='def', + next_page_token="def", ), asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( governed_assets=[ asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), ], - next_page_token='ghi', + next_page_token="ghi", ), asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( governed_assets=[ @@ -9047,15 +9947,21 @@ async def test_analyze_org_policy_governed_assets_async_pager(): ), RuntimeError, ) - async_pager = await client.analyze_org_policy_governed_assets(request={},) - assert async_pager.next_page_token == 'abc' + async_pager = await client.analyze_org_policy_governed_assets( + request={}, + ) + assert async_pager.next_page_token == "abc" responses = [] - async for response in async_pager: # pragma: no branch + async for response in async_pager: # pragma: no branch responses.append(response) assert len(responses) == 6 - assert all(isinstance(i, asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset) - for i in responses) + assert all( + isinstance( + i, asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset + ) + for i in responses + ) @pytest.mark.asyncio @@ -9066,8 +9972,10 @@ async def test_analyze_org_policy_governed_assets_async_pages(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.analyze_org_policy_governed_assets), - '__call__', new_callable=mock.AsyncMock) as call: + type(client.transport.analyze_org_policy_governed_assets), + "__call__", + new_callable=mock.AsyncMock, + ) as call: # Set the response to a series of pages. call.side_effect = ( asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( @@ -9076,17 +9984,17 @@ async def test_analyze_org_policy_governed_assets_async_pages(): asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), ], - next_page_token='abc', + next_page_token="abc", ), asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( governed_assets=[], - next_page_token='def', + next_page_token="def", ), asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( governed_assets=[ asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), ], - next_page_token='ghi', + next_page_token="ghi", ), asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( governed_assets=[ @@ -9099,11 +10007,11 @@ async def test_analyze_org_policy_governed_assets_async_pages(): pages = [] # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch + async for page_ in ( # pragma: no branch await client.analyze_org_policy_governed_assets(request={}) ).pages: pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -9125,7 +10033,9 @@ def test_export_assets_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client._transport._wrapped_methods[client._transport.export_assets] = mock_rpc request = {} @@ -9145,80 +10055,94 @@ def test_export_assets_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_export_assets_rest_required_fields(request_type=asset_service.ExportAssetsRequest): +def test_export_assets_rest_required_fields( + request_type=asset_service.ExportAssetsRequest, +): transport_class = transports.AssetServiceRestTransport request_init = {} request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).export_assets._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).export_assets._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = 'parent_value' + jsonified_request["parent"] = "parent_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).export_assets._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).export_assets._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' + assert jsonified_request["parent"] == "parent_value" client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. 
pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, } - transcode_result['body'] = pb_request + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.export_assets(request) - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_export_assets_rest_unset_required_fields(): - transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.AssetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.export_assets._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent", "outputConfig", ))) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "outputConfig", + ) + ) + ) def test_list_assets_rest_use_cached_wrapped_rpc(): @@ -9239,7 +10163,9 @@ def test_list_assets_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._transport._wrapped_methods[client._transport.list_assets] = mock_rpc request = {} @@ -9262,50 +10188,62 @@ def test_list_assets_rest_required_fields(request_type=asset_service.ListAssetsR request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_assets._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_assets._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = 'parent_value' + jsonified_request["parent"] = "parent_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_assets._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_assets._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("asset_types", "content_type", "page_size", "page_token", "read_time", "relationship_types", )) + assert not set(unset_fields) - set( + ( + "asset_types", + "content_type", + "page_size", + "page_token", + "read_time", + "relationship_types", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' + assert jsonified_request["parent"] == "parent_value" client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. return_value = asset_service.ListAssetsResponse() # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, } transcode.return_value = transcode_result @@ -9316,23 +10254,36 @@ def test_list_assets_rest_required_fields(request_type=asset_service.ListAssetsR return_value = asset_service.ListAssetsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.list_assets(request) - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_list_assets_rest_unset_required_fields(): - transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.AssetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.list_assets._get_unset_required_fields({}) - assert set(unset_fields) == (set(("assetTypes", "contentType", "pageSize", "pageToken", "readTime", "relationshipTypes", )) & set(("parent", ))) + assert set(unset_fields) == ( + set( + ( + "assetTypes", + "contentType", + "pageSize", + "pageToken", + "readTime", + "relationshipTypes", + ) + ) + & set(("parent",)) + ) def test_list_assets_rest_flattened(): @@ -9342,16 +10293,16 @@ def test_list_assets_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
return_value = asset_service.ListAssetsResponse() # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'sample1/sample2'} + sample_request = {"parent": "sample1/sample2"} # get truthy value for each flattened field mock_args = dict( - parent='parent_value', + parent="parent_value", ) mock_args.update(sample_request) @@ -9361,7 +10312,7 @@ def test_list_assets_rest_flattened(): # Convert return value to protobuf type return_value = asset_service.ListAssetsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -9371,10 +10322,12 @@ def test_list_assets_rest_flattened(): # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=*/*}/assets" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{parent=*/*}/assets" % client.transport._host, args[1] + ) -def test_list_assets_rest_flattened_error(transport: str = 'rest'): +def test_list_assets_rest_flattened_error(transport: str = "rest"): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -9385,20 +10338,20 @@ def test_list_assets_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.list_assets( asset_service.ListAssetsRequest(), - parent='parent_value', + parent="parent_value", ) -def test_list_assets_rest_pager(transport: str = 'rest'): +def test_list_assets_rest_pager(transport: str = "rest"): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # TODO(kbandes): remove this mock unless there's a good reason for it. 
- #with mock.patch.object(path_template, 'transcode') as transcode: + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( asset_service.ListAssetsResponse( @@ -9407,17 +10360,17 @@ def test_list_assets_rest_pager(transport: str = 'rest'): assets.Asset(), assets.Asset(), ], - next_page_token='abc', + next_page_token="abc", ), asset_service.ListAssetsResponse( assets=[], - next_page_token='def', + next_page_token="def", ), asset_service.ListAssetsResponse( assets=[ assets.Asset(), ], - next_page_token='ghi', + next_page_token="ghi", ), asset_service.ListAssetsResponse( assets=[ @@ -9433,21 +10386,20 @@ def test_list_assets_rest_pager(transport: str = 'rest'): response = tuple(asset_service.ListAssetsResponse.to_json(x) for x in response) return_values = tuple(Response() for i in response) for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') + return_val._content = response_val.encode("UTF-8") return_val.status_code = 200 req.side_effect = return_values - sample_request = {'parent': 'sample1/sample2'} + sample_request = {"parent": "sample1/sample2"} pager = client.list_assets(request=sample_request) results = list(pager) assert len(results) == 6 - assert all(isinstance(i, assets.Asset) - for i in results) + assert all(isinstance(i, assets.Asset) for i in results) pages = list(client.list_assets(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -9465,12 +10417,19 @@ def test_batch_get_assets_history_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.batch_get_assets_history in client._transport._wrapped_methods + assert ( + client._transport.batch_get_assets_history + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.batch_get_assets_history] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.batch_get_assets_history + ] = mock_rpc request = {} client.batch_get_assets_history(request) @@ -9485,57 +10444,69 @@ def test_batch_get_assets_history_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_batch_get_assets_history_rest_required_fields(request_type=asset_service.BatchGetAssetsHistoryRequest): +def test_batch_get_assets_history_rest_required_fields( + request_type=asset_service.BatchGetAssetsHistoryRequest, +): transport_class = transports.AssetServiceRestTransport request_init = {} request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).batch_get_assets_history._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).batch_get_assets_history._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = 'parent_value' + jsonified_request["parent"] = "parent_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).batch_get_assets_history._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).batch_get_assets_history._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("asset_names", "content_type", "read_time_window", "relationship_types", )) + assert not set(unset_fields) - set( + ( + "asset_names", + "content_type", + "read_time_window", + "relationship_types", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' + assert jsonified_request["parent"] == "parent_value" client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. return_value = asset_service.BatchGetAssetsHistoryResponse() # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. 
pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, } transcode.return_value = transcode_result @@ -9546,23 +10517,34 @@ def test_batch_get_assets_history_rest_required_fields(request_type=asset_servic return_value = asset_service.BatchGetAssetsHistoryResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.batch_get_assets_history(request) - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_batch_get_assets_history_rest_unset_required_fields(): - transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.AssetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.batch_get_assets_history._get_unset_required_fields({}) - assert set(unset_fields) == (set(("assetNames", "contentType", "readTimeWindow", "relationshipTypes", )) & set(("parent", ))) + assert set(unset_fields) == ( + set( + ( + "assetNames", + "contentType", + "readTimeWindow", + "relationshipTypes", + ) + ) + & set(("parent",)) + ) def test_create_feed_rest_use_cached_wrapped_rpc(): @@ -9583,7 +10565,9 @@ def test_create_feed_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client._transport._wrapped_methods[client._transport.create_feed] = mock_rpc request = {} @@ -9607,53 +10591,56 @@ def test_create_feed_rest_required_fields(request_type=asset_service.CreateFeedR request_init["feed_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_feed._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_feed._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = 'parent_value' - jsonified_request["feedId"] = 'feed_id_value' + jsonified_request["parent"] = "parent_value" + jsonified_request["feedId"] = "feed_id_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_feed._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_feed._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' + assert jsonified_request["parent"] == "parent_value" assert "feedId" in jsonified_request - assert jsonified_request["feedId"] == 'feed_id_value' + assert jsonified_request["feedId"] == "feed_id_value" client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. return_value = asset_service.Feed() # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. 
pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, } - transcode_result['body'] = pb_request + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -9663,23 +10650,33 @@ def test_create_feed_rest_required_fields(request_type=asset_service.CreateFeedR return_value = asset_service.Feed.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.create_feed(request) - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_create_feed_rest_unset_required_fields(): - transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.AssetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.create_feed._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent", "feedId", "feed", ))) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "feedId", + "feed", + ) + ) + ) def test_create_feed_rest_flattened(): @@ -9689,16 +10686,16 @@ def test_create_feed_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = asset_service.Feed() # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'sample1/sample2'} + sample_request = {"parent": "sample1/sample2"} # get truthy value for each flattened field mock_args = dict( - parent='parent_value', + parent="parent_value", ) mock_args.update(sample_request) @@ -9708,7 +10705,7 @@ def test_create_feed_rest_flattened(): # Convert return value to protobuf type return_value = asset_service.Feed.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -9718,10 +10715,12 @@ def test_create_feed_rest_flattened(): # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=*/*}/feeds" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{parent=*/*}/feeds" % client.transport._host, args[1] + ) -def test_create_feed_rest_flattened_error(transport: str = 'rest'): +def test_create_feed_rest_flattened_error(transport: str = "rest"): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -9732,7 +10731,7 @@ def test_create_feed_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.create_feed( asset_service.CreateFeedRequest(), - parent='parent_value', + parent="parent_value", ) @@ -9754,7 +10753,9 @@ def test_get_feed_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._transport._wrapped_methods[client._transport.get_feed] = mock_rpc request = {} @@ -9777,48 +10778,51 @@ def test_get_feed_rest_required_fields(request_type=asset_service.GetFeedRequest request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_feed._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_feed._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = 'name_value' + jsonified_request["name"] = "name_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_feed._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_feed._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' + assert jsonified_request["name"] == "name_value" client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. return_value = asset_service.Feed() # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. 
pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, } transcode.return_value = transcode_result @@ -9829,23 +10833,24 @@ def test_get_feed_rest_required_fields(request_type=asset_service.GetFeedRequest return_value = asset_service.Feed.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.get_feed(request) - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_get_feed_rest_unset_required_fields(): - transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.AssetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.get_feed._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) + assert set(unset_fields) == (set(()) & set(("name",))) def test_get_feed_rest_flattened(): @@ -9855,16 +10860,16 @@ def test_get_feed_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = asset_service.Feed() # get arguments that satisfy an http rule for this method - sample_request = {'name': 'sample1/sample2/feeds/sample3'} + sample_request = {"name": "sample1/sample2/feeds/sample3"} # get truthy value for each flattened field mock_args = dict( - name='name_value', + name="name_value", ) mock_args.update(sample_request) @@ -9874,7 +10879,7 @@ def test_get_feed_rest_flattened(): # Convert return value to protobuf type return_value = asset_service.Feed.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -9884,10 +10889,12 @@ def test_get_feed_rest_flattened(): # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=*/*/feeds/*}" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{name=*/*/feeds/*}" % client.transport._host, args[1] + ) -def test_get_feed_rest_flattened_error(transport: str = 'rest'): +def test_get_feed_rest_flattened_error(transport: str = "rest"): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -9898,7 +10905,7 @@ def test_get_feed_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.get_feed( asset_service.GetFeedRequest(), - name='name_value', + name="name_value", ) @@ -9920,7 +10927,9 @@ def test_list_feeds_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._transport._wrapped_methods[client._transport.list_feeds] = mock_rpc request = {} @@ -9943,48 +10952,51 @@ def test_list_feeds_rest_required_fields(request_type=asset_service.ListFeedsReq request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_feeds._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_feeds._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = 'parent_value' + jsonified_request["parent"] = "parent_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_feeds._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_feeds._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' + assert jsonified_request["parent"] == "parent_value" client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. return_value = asset_service.ListFeedsResponse() # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. 
pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, } transcode.return_value = transcode_result @@ -9995,23 +11007,24 @@ def test_list_feeds_rest_required_fields(request_type=asset_service.ListFeedsReq return_value = asset_service.ListFeedsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.list_feeds(request) - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_list_feeds_rest_unset_required_fields(): - transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.AssetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.list_feeds._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent", ))) + assert set(unset_fields) == (set(()) & set(("parent",))) def test_list_feeds_rest_flattened(): @@ -10021,16 +11034,16 @@ def test_list_feeds_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = asset_service.ListFeedsResponse() # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'sample1/sample2'} + sample_request = {"parent": "sample1/sample2"} # get truthy value for each flattened field mock_args = dict( - parent='parent_value', + parent="parent_value", ) mock_args.update(sample_request) @@ -10040,7 +11053,7 @@ def test_list_feeds_rest_flattened(): # Convert return value to protobuf type return_value = asset_service.ListFeedsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -10050,10 +11063,12 @@ def test_list_feeds_rest_flattened(): # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=*/*}/feeds" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{parent=*/*}/feeds" % client.transport._host, args[1] + ) -def test_list_feeds_rest_flattened_error(transport: str = 'rest'): +def test_list_feeds_rest_flattened_error(transport: str = "rest"): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -10064,7 +11079,7 @@ def test_list_feeds_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.list_feeds( asset_service.ListFeedsRequest(), - parent='parent_value', + parent="parent_value", ) @@ -10086,7 +11101,9 @@ def test_update_feed_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._transport._wrapped_methods[client._transport.update_feed] = mock_rpc request = {} @@ -10108,46 +11125,49 @@ def test_update_feed_rest_required_fields(request_type=asset_service.UpdateFeedR request_init = {} request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_feed._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_feed._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_feed._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_feed._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. return_value = asset_service.Feed() # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. 
pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "patch", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, } - transcode_result['body'] = pb_request + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -10157,23 +11177,32 @@ def test_update_feed_rest_required_fields(request_type=asset_service.UpdateFeedR return_value = asset_service.Feed.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.update_feed(request) - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_update_feed_rest_unset_required_fields(): - transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.AssetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.update_feed._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("feed", "updateMask", ))) + assert set(unset_fields) == ( + set(()) + & set( + ( + "feed", + "updateMask", + ) + ) + ) def test_update_feed_rest_flattened(): @@ -10183,16 +11212,16 @@ def test_update_feed_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = asset_service.Feed() # get arguments that satisfy an http rule for this method - sample_request = {'feed': {'name': 'sample1/sample2/feeds/sample3'}} + sample_request = {"feed": {"name": "sample1/sample2/feeds/sample3"}} # get truthy value for each flattened field mock_args = dict( - feed=asset_service.Feed(name='name_value'), + feed=asset_service.Feed(name="name_value"), ) mock_args.update(sample_request) @@ -10202,7 +11231,7 @@ def test_update_feed_rest_flattened(): # Convert return value to protobuf type return_value = asset_service.Feed.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -10212,10 +11241,12 @@ def test_update_feed_rest_flattened(): # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{feed.name=*/*/feeds/*}" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{feed.name=*/*/feeds/*}" % client.transport._host, args[1] + ) -def test_update_feed_rest_flattened_error(transport: str = 'rest'): +def test_update_feed_rest_flattened_error(transport: str = "rest"): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -10226,7 +11257,7 @@ def test_update_feed_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.update_feed( asset_service.UpdateFeedRequest(), - feed=asset_service.Feed(name='name_value'), + feed=asset_service.Feed(name="name_value"), ) @@ -10248,7 +11279,9 @@ def test_delete_feed_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._transport._wrapped_methods[client._transport.delete_feed] = mock_rpc request = {} @@ -10271,72 +11304,76 @@ def test_delete_feed_rest_required_fields(request_type=asset_service.DeleteFeedR request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_feed._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_feed._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = 'name_value' + jsonified_request["name"] = "name_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_feed._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_feed._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' + assert jsonified_request["name"] == "name_value" client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. return_value = None # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. 
pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, } transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - json_return_value = '' + json_return_value = "" - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.delete_feed(request) - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_delete_feed_rest_unset_required_fields(): - transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.AssetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.delete_feed._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) + assert set(unset_fields) == (set(()) & set(("name",))) def test_delete_feed_rest_flattened(): @@ -10346,24 +11383,24 @@ def test_delete_feed_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = None # get arguments that satisfy an http rule for this method - sample_request = {'name': 'sample1/sample2/feeds/sample3'} + sample_request = {"name": "sample1/sample2/feeds/sample3"} # get truthy value for each flattened field mock_args = dict( - name='name_value', + name="name_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = '' - response_value._content = json_return_value.encode('UTF-8') + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -10373,10 +11410,12 @@ def test_delete_feed_rest_flattened(): # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=*/*/feeds/*}" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{name=*/*/feeds/*}" % client.transport._host, args[1] + ) -def test_delete_feed_rest_flattened_error(transport: str = 'rest'): +def test_delete_feed_rest_flattened_error(transport: str = "rest"): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -10387,7 +11426,7 @@ def test_delete_feed_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.delete_feed( asset_service.DeleteFeedRequest(), - name='name_value', + name="name_value", ) @@ -10405,12 +11444,18 @@ def test_search_all_resources_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.search_all_resources in client._transport._wrapped_methods + assert ( + client._transport.search_all_resources in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.search_all_resources] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.search_all_resources + ] = mock_rpc request = {} client.search_all_resources(request) @@ -10425,57 +11470,71 @@ def test_search_all_resources_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_search_all_resources_rest_required_fields(request_type=asset_service.SearchAllResourcesRequest): +def test_search_all_resources_rest_required_fields( + request_type=asset_service.SearchAllResourcesRequest, +): transport_class = transports.AssetServiceRestTransport request_init = {} request_init["scope"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).search_all_resources._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).search_all_resources._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["scope"] = 'scope_value' + jsonified_request["scope"] = "scope_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).search_all_resources._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).search_all_resources._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("asset_types", "order_by", "page_size", "page_token", "query", "read_mask", )) + assert not set(unset_fields) - set( + ( + "asset_types", + "order_by", + "page_size", + "page_token", + "query", + "read_mask", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "scope" in jsonified_request - assert jsonified_request["scope"] == 'scope_value' + assert jsonified_request["scope"] == "scope_value" client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. return_value = asset_service.SearchAllResourcesResponse() # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, } transcode.return_value = transcode_result @@ -10486,23 +11545,36 @@ def test_search_all_resources_rest_required_fields(request_type=asset_service.Se return_value = asset_service.SearchAllResourcesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.search_all_resources(request) - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_search_all_resources_rest_unset_required_fields(): - transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.AssetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.search_all_resources._get_unset_required_fields({}) - assert set(unset_fields) == (set(("assetTypes", "orderBy", "pageSize", "pageToken", "query", "readMask", )) & set(("scope", ))) + assert set(unset_fields) == ( + set( + ( + "assetTypes", + "orderBy", + "pageSize", + "pageToken", + "query", + "readMask", + ) + ) + & set(("scope",)) + ) def test_search_all_resources_rest_flattened(): @@ -10512,18 +11584,18 @@ def test_search_all_resources_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
return_value = asset_service.SearchAllResourcesResponse() # get arguments that satisfy an http rule for this method - sample_request = {'scope': 'sample1/sample2'} + sample_request = {"scope": "sample1/sample2"} # get truthy value for each flattened field mock_args = dict( - scope='scope_value', - query='query_value', - asset_types=['asset_types_value'], + scope="scope_value", + query="query_value", + asset_types=["asset_types_value"], ) mock_args.update(sample_request) @@ -10533,7 +11605,7 @@ def test_search_all_resources_rest_flattened(): # Convert return value to protobuf type return_value = asset_service.SearchAllResourcesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -10543,10 +11615,12 @@ def test_search_all_resources_rest_flattened(): # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{scope=*/*}:searchAllResources" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{scope=*/*}:searchAllResources" % client.transport._host, args[1] + ) -def test_search_all_resources_rest_flattened_error(transport: str = 'rest'): +def test_search_all_resources_rest_flattened_error(transport: str = "rest"): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -10557,22 +11631,22 @@ def test_search_all_resources_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.search_all_resources( asset_service.SearchAllResourcesRequest(), - scope='scope_value', - query='query_value', - asset_types=['asset_types_value'], + scope="scope_value", + query="query_value", + asset_types=["asset_types_value"], ) -def test_search_all_resources_rest_pager(transport: str = 'rest'): +def test_search_all_resources_rest_pager(transport: str = "rest"): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # TODO(kbandes): remove this mock unless there's a good reason for it. 
- #with mock.patch.object(path_template, 'transcode') as transcode: + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( asset_service.SearchAllResourcesResponse( @@ -10581,17 +11655,17 @@ def test_search_all_resources_rest_pager(transport: str = 'rest'): assets.ResourceSearchResult(), assets.ResourceSearchResult(), ], - next_page_token='abc', + next_page_token="abc", ), asset_service.SearchAllResourcesResponse( results=[], - next_page_token='def', + next_page_token="def", ), asset_service.SearchAllResourcesResponse( results=[ assets.ResourceSearchResult(), ], - next_page_token='ghi', + next_page_token="ghi", ), asset_service.SearchAllResourcesResponse( results=[ @@ -10604,24 +11678,25 @@ def test_search_all_resources_rest_pager(transport: str = 'rest'): response = response + response # Wrap the values into proper Response objs - response = tuple(asset_service.SearchAllResourcesResponse.to_json(x) for x in response) + response = tuple( + asset_service.SearchAllResourcesResponse.to_json(x) for x in response + ) return_values = tuple(Response() for i in response) for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') + return_val._content = response_val.encode("UTF-8") return_val.status_code = 200 req.side_effect = return_values - sample_request = {'scope': 'sample1/sample2'} + sample_request = {"scope": "sample1/sample2"} pager = client.search_all_resources(request=sample_request) results = list(pager) assert len(results) == 6 - assert all(isinstance(i, assets.ResourceSearchResult) - for i in results) + assert all(isinstance(i, assets.ResourceSearchResult) for i in results) pages = list(client.search_all_resources(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -10639,12 +11714,19 @@ def test_search_all_iam_policies_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.search_all_iam_policies in client._transport._wrapped_methods + assert ( + client._transport.search_all_iam_policies + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.search_all_iam_policies] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.search_all_iam_policies + ] = mock_rpc request = {} client.search_all_iam_policies(request) @@ -10659,57 +11741,70 @@ def test_search_all_iam_policies_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_search_all_iam_policies_rest_required_fields(request_type=asset_service.SearchAllIamPoliciesRequest): +def test_search_all_iam_policies_rest_required_fields( + request_type=asset_service.SearchAllIamPoliciesRequest, +): transport_class = transports.AssetServiceRestTransport request_init = {} request_init["scope"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).search_all_iam_policies._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).search_all_iam_policies._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["scope"] = 'scope_value' + jsonified_request["scope"] = "scope_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).search_all_iam_policies._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).search_all_iam_policies._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("asset_types", "order_by", "page_size", "page_token", "query", )) + assert not set(unset_fields) - set( + ( + "asset_types", + "order_by", + "page_size", + "page_token", + "query", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "scope" in jsonified_request - assert jsonified_request["scope"] == 'scope_value' + assert jsonified_request["scope"] == "scope_value" client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. return_value = asset_service.SearchAllIamPoliciesResponse() # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. 
pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, } transcode.return_value = transcode_result @@ -10720,23 +11815,35 @@ def test_search_all_iam_policies_rest_required_fields(request_type=asset_service return_value = asset_service.SearchAllIamPoliciesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.search_all_iam_policies(request) - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_search_all_iam_policies_rest_unset_required_fields(): - transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.AssetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.search_all_iam_policies._get_unset_required_fields({}) - assert set(unset_fields) == (set(("assetTypes", "orderBy", "pageSize", "pageToken", "query", )) & set(("scope", ))) + assert set(unset_fields) == ( + set( + ( + "assetTypes", + "orderBy", + "pageSize", + "pageToken", + "query", + ) + ) + & set(("scope",)) + ) def test_search_all_iam_policies_rest_flattened(): @@ -10746,17 +11853,17 @@ def test_search_all_iam_policies_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = asset_service.SearchAllIamPoliciesResponse() # get arguments that satisfy an http rule for this method - sample_request = {'scope': 'sample1/sample2'} + sample_request = {"scope": "sample1/sample2"} # get truthy value for each flattened field mock_args = dict( - scope='scope_value', - query='query_value', + scope="scope_value", + query="query_value", ) mock_args.update(sample_request) @@ -10766,7 +11873,7 @@ def test_search_all_iam_policies_rest_flattened(): # Convert return value to protobuf type return_value = asset_service.SearchAllIamPoliciesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -10776,10 +11883,12 @@ def test_search_all_iam_policies_rest_flattened(): # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{scope=*/*}:searchAllIamPolicies" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{scope=*/*}:searchAllIamPolicies" % client.transport._host, args[1] + ) -def test_search_all_iam_policies_rest_flattened_error(transport: str = 'rest'): +def test_search_all_iam_policies_rest_flattened_error(transport: str = "rest"): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -10790,21 +11899,21 @@ def test_search_all_iam_policies_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.search_all_iam_policies( asset_service.SearchAllIamPoliciesRequest(), - scope='scope_value', - query='query_value', + scope="scope_value", + query="query_value", ) -def test_search_all_iam_policies_rest_pager(transport: str = 'rest'): +def test_search_all_iam_policies_rest_pager(transport: str = "rest"): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # TODO(kbandes): remove this mock unless there's a good reason for it. - #with mock.patch.object(path_template, 'transcode') as transcode: + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( asset_service.SearchAllIamPoliciesResponse( @@ -10813,17 +11922,17 @@ def test_search_all_iam_policies_rest_pager(transport: str = 'rest'): assets.IamPolicySearchResult(), assets.IamPolicySearchResult(), ], - next_page_token='abc', + next_page_token="abc", ), asset_service.SearchAllIamPoliciesResponse( results=[], - next_page_token='def', + next_page_token="def", ), asset_service.SearchAllIamPoliciesResponse( results=[ assets.IamPolicySearchResult(), ], - next_page_token='ghi', + next_page_token="ghi", ), asset_service.SearchAllIamPoliciesResponse( results=[ @@ -10836,24 +11945,25 @@ def test_search_all_iam_policies_rest_pager(transport: str = 'rest'): response = response + response # Wrap the values into proper Response objs - response = tuple(asset_service.SearchAllIamPoliciesResponse.to_json(x) for x in response) + response = tuple( + asset_service.SearchAllIamPoliciesResponse.to_json(x) for x in response + ) return_values = tuple(Response() for i in response) for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') + return_val._content = response_val.encode("UTF-8") return_val.status_code = 200 req.side_effect = return_values - sample_request = {'scope': 'sample1/sample2'} + sample_request = {"scope": "sample1/sample2"} pager = client.search_all_iam_policies(request=sample_request) results = list(pager) assert len(results) == 6 - assert all(isinstance(i, assets.IamPolicySearchResult) - for i in results) + assert all(isinstance(i, assets.IamPolicySearchResult) for i in results) pages = list(client.search_all_iam_policies(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -10871,12 +11981,18 @@ def test_analyze_iam_policy_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.analyze_iam_policy in 
client._transport._wrapped_methods + assert ( + client._transport.analyze_iam_policy in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.analyze_iam_policy] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.analyze_iam_policy + ] = mock_rpc request = {} client.analyze_iam_policy(request) @@ -10891,52 +12007,63 @@ def test_analyze_iam_policy_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_analyze_iam_policy_rest_required_fields(request_type=asset_service.AnalyzeIamPolicyRequest): +def test_analyze_iam_policy_rest_required_fields( + request_type=asset_service.AnalyzeIamPolicyRequest, +): transport_class = transports.AssetServiceRestTransport request_init = {} request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_iam_policy._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).analyze_iam_policy._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_iam_policy._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).analyze_iam_policy._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("analysis_query", "execution_timeout", "saved_analysis_query", )) + assert not set(unset_fields) - set( + ( + "analysis_query", + "execution_timeout", + "saved_analysis_query", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. return_value = asset_service.AnalyzeIamPolicyResponse() # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. 
pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, } transcode.return_value = transcode_result @@ -10947,23 +12074,33 @@ def test_analyze_iam_policy_rest_required_fields(request_type=asset_service.Anal return_value = asset_service.AnalyzeIamPolicyResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.analyze_iam_policy(request) - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_analyze_iam_policy_rest_unset_required_fields(): - transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.AssetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.analyze_iam_policy._get_unset_required_fields({}) - assert set(unset_fields) == (set(("analysisQuery", "executionTimeout", "savedAnalysisQuery", )) & set(("analysisQuery", ))) + assert set(unset_fields) == ( + set( + ( + "analysisQuery", + "executionTimeout", + "savedAnalysisQuery", + ) + ) + & set(("analysisQuery",)) + ) def test_analyze_iam_policy_longrunning_rest_use_cached_wrapped_rpc(): @@ -10980,12 +12117,19 @@ def test_analyze_iam_policy_longrunning_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.analyze_iam_policy_longrunning in client._transport._wrapped_methods + assert ( + client._transport.analyze_iam_policy_longrunning + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.analyze_iam_policy_longrunning] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.analyze_iam_policy_longrunning + ] = mock_rpc request = {} client.analyze_iam_policy_longrunning(request) @@ -11004,75 +12148,91 @@ def test_analyze_iam_policy_longrunning_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_analyze_iam_policy_longrunning_rest_required_fields(request_type=asset_service.AnalyzeIamPolicyLongrunningRequest): +def test_analyze_iam_policy_longrunning_rest_required_fields( + request_type=asset_service.AnalyzeIamPolicyLongrunningRequest, +): transport_class = transports.AssetServiceRestTransport request_init = {} request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_iam_policy_longrunning._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).analyze_iam_policy_longrunning._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_iam_policy_longrunning._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).analyze_iam_policy_longrunning._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. 
pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, } - transcode_result['body'] = pb_request + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.analyze_iam_policy_longrunning(request) - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_analyze_iam_policy_longrunning_rest_unset_required_fields(): - transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.AssetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) - unset_fields = transport.analyze_iam_policy_longrunning._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("analysisQuery", "outputConfig", ))) + unset_fields = transport.analyze_iam_policy_longrunning._get_unset_required_fields( + {} + ) + assert set(unset_fields) == ( + set(()) + & set( + ( + "analysisQuery", + "outputConfig", + ) + ) + ) def test_analyze_move_rest_use_cached_wrapped_rpc(): @@ -11093,7 +12253,9 @@ def test_analyze_move_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client._transport._wrapped_methods[client._transport.analyze_move] = mock_rpc request = {} @@ -11109,7 +12271,9 @@ def test_analyze_move_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_analyze_move_rest_required_fields(request_type=asset_service.AnalyzeMoveRequest): +def test_analyze_move_rest_required_fields( + request_type=asset_service.AnalyzeMoveRequest, +): transport_class = transports.AssetServiceRestTransport request_init = {} @@ -11117,56 +12281,64 @@ def test_analyze_move_rest_required_fields(request_type=asset_service.AnalyzeMov request_init["destination_parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped assert "destinationParent" not in jsonified_request - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_move._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).analyze_move._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present assert "destinationParent" in jsonified_request assert jsonified_request["destinationParent"] == request_init["destination_parent"] - jsonified_request["resource"] = 'resource_value' - jsonified_request["destinationParent"] = 'destination_parent_value' + jsonified_request["resource"] = "resource_value" + jsonified_request["destinationParent"] = "destination_parent_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_move._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).analyze_move._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("destination_parent", "view", )) + assert not set(unset_fields) - set( + ( + "destination_parent", + "view", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "resource" in jsonified_request - assert jsonified_request["resource"] == 'resource_value' + assert jsonified_request["resource"] == "resource_value" assert "destinationParent" in jsonified_request - assert jsonified_request["destinationParent"] == 'destination_parent_value' + assert jsonified_request["destinationParent"] == "destination_parent_value" client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. return_value = asset_service.AnalyzeMoveResponse() # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. 
- with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, } transcode.return_value = transcode_result @@ -11177,7 +12349,7 @@ def test_analyze_move_rest_required_fields(request_type=asset_service.AnalyzeMov return_value = asset_service.AnalyzeMoveResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -11189,15 +12361,30 @@ def test_analyze_move_rest_required_fields(request_type=asset_service.AnalyzeMov "", ), ] - actual_params = req.call_args.kwargs['params'] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_analyze_move_rest_unset_required_fields(): - transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.AssetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.analyze_move._get_unset_required_fields({}) - assert set(unset_fields) == (set(("destinationParent", "view", )) & set(("resource", "destinationParent", ))) + assert set(unset_fields) == ( + set( + ( + "destinationParent", + "view", + ) + ) + & set( + ( + "resource", + "destinationParent", + ) + ) + ) def test_query_assets_rest_use_cached_wrapped_rpc(): @@ -11218,7 +12405,9 @@ def test_query_assets_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client._transport._wrapped_methods[client._transport.query_assets] = mock_rpc request = {} @@ -11234,57 +12423,62 @@ def test_query_assets_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_query_assets_rest_required_fields(request_type=asset_service.QueryAssetsRequest): +def test_query_assets_rest_required_fields( + request_type=asset_service.QueryAssetsRequest, +): transport_class = transports.AssetServiceRestTransport request_init = {} request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).query_assets._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).query_assets._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = 'parent_value' + jsonified_request["parent"] = "parent_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).query_assets._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).query_assets._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' + assert jsonified_request["parent"] == "parent_value" client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. return_value = asset_service.QueryAssetsResponse() # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. 
pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, } - transcode_result['body'] = pb_request + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -11294,23 +12488,24 @@ def test_query_assets_rest_required_fields(request_type=asset_service.QueryAsset return_value = asset_service.QueryAssetsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.query_assets(request) - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_query_assets_rest_unset_required_fields(): - transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.AssetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.query_assets._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent", ))) + assert set(unset_fields) == (set(()) & set(("parent",))) def test_create_saved_query_rest_use_cached_wrapped_rpc(): @@ -11327,12 +12522,18 @@ def test_create_saved_query_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.create_saved_query in client._transport._wrapped_methods + assert ( + client._transport.create_saved_query in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_saved_query] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.create_saved_query + ] = mock_rpc request = {} client.create_saved_query(request) @@ -11347,7 +12548,9 @@ def test_create_saved_query_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_create_saved_query_rest_required_fields(request_type=asset_service.CreateSavedQueryRequest): +def test_create_saved_query_rest_required_fields( + request_type=asset_service.CreateSavedQueryRequest, +): transport_class = transports.AssetServiceRestTransport request_init = {} @@ -11355,58 +12558,61 @@ def test_create_saved_query_rest_required_fields(request_type=asset_service.Crea request_init["saved_query_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped assert "savedQueryId" not in jsonified_request - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_saved_query._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_saved_query._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present assert "savedQueryId" in jsonified_request assert jsonified_request["savedQueryId"] == request_init["saved_query_id"] - jsonified_request["parent"] = 'parent_value' - jsonified_request["savedQueryId"] = 'saved_query_id_value' + jsonified_request["parent"] = "parent_value" + jsonified_request["savedQueryId"] = "saved_query_id_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_saved_query._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_saved_query._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("saved_query_id", )) + assert not set(unset_fields) - set(("saved_query_id",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' + assert jsonified_request["parent"] == "parent_value" assert "savedQueryId" in jsonified_request - assert jsonified_request["savedQueryId"] == 'saved_query_id_value' + assert jsonified_request["savedQueryId"] == "saved_query_id_value" client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. return_value = asset_service.SavedQuery() # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. 
- with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, } - transcode_result['body'] = pb_request + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -11416,7 +12622,7 @@ def test_create_saved_query_rest_required_fields(request_type=asset_service.Crea return_value = asset_service.SavedQuery.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -11428,15 +12634,26 @@ def test_create_saved_query_rest_required_fields(request_type=asset_service.Crea "", ), ] - actual_params = req.call_args.kwargs['params'] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_create_saved_query_rest_unset_required_fields(): - transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.AssetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.create_saved_query._get_unset_required_fields({}) - assert set(unset_fields) == (set(("savedQueryId", )) & set(("parent", "savedQuery", "savedQueryId", ))) + assert set(unset_fields) == ( + set(("savedQueryId",)) + & set( + ( + "parent", + "savedQuery", + "savedQueryId", + ) + ) + ) def test_create_saved_query_rest_flattened(): @@ -11446,18 +12663,18 @@ def test_create_saved_query_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = asset_service.SavedQuery() # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'sample1/sample2'} + sample_request = {"parent": "sample1/sample2"} # get truthy value for each flattened field mock_args = dict( - parent='parent_value', - saved_query=asset_service.SavedQuery(name='name_value'), - saved_query_id='saved_query_id_value', + parent="parent_value", + saved_query=asset_service.SavedQuery(name="name_value"), + saved_query_id="saved_query_id_value", ) mock_args.update(sample_request) @@ -11467,7 +12684,7 @@ def test_create_saved_query_rest_flattened(): # Convert return value to protobuf type return_value = asset_service.SavedQuery.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -11477,10 +12694,12 @@ def test_create_saved_query_rest_flattened(): # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=*/*}/savedQueries" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{parent=*/*}/savedQueries" % client.transport._host, args[1] + ) -def test_create_saved_query_rest_flattened_error(transport: str = 'rest'): +def test_create_saved_query_rest_flattened_error(transport: str = "rest"): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -11491,9 +12710,9 @@ def test_create_saved_query_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.create_saved_query( asset_service.CreateSavedQueryRequest(), - parent='parent_value', - saved_query=asset_service.SavedQuery(name='name_value'), - saved_query_id='saved_query_id_value', + parent="parent_value", + saved_query=asset_service.SavedQuery(name="name_value"), + saved_query_id="saved_query_id_value", ) @@ -11515,7 +12734,9 @@ def test_get_saved_query_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._transport._wrapped_methods[client._transport.get_saved_query] = mock_rpc request = {} @@ -11531,55 +12752,60 @@ def test_get_saved_query_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_get_saved_query_rest_required_fields(request_type=asset_service.GetSavedQueryRequest): +def test_get_saved_query_rest_required_fields( + request_type=asset_service.GetSavedQueryRequest, +): transport_class = transports.AssetServiceRestTransport request_init = {} request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_saved_query._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_saved_query._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = 'name_value' + jsonified_request["name"] = "name_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_saved_query._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_saved_query._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' + assert jsonified_request["name"] == "name_value" client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. return_value = asset_service.SavedQuery() # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, } transcode.return_value = transcode_result @@ -11590,23 +12816,24 @@ def test_get_saved_query_rest_required_fields(request_type=asset_service.GetSave return_value = asset_service.SavedQuery.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.get_saved_query(request) - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_get_saved_query_rest_unset_required_fields(): - transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.AssetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.get_saved_query._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) + assert set(unset_fields) == (set(()) & set(("name",))) def test_get_saved_query_rest_flattened(): @@ -11616,16 +12843,16 @@ def test_get_saved_query_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = asset_service.SavedQuery() # get arguments that satisfy an http rule for this method - sample_request = {'name': 'sample1/sample2/savedQueries/sample3'} + sample_request = {"name": "sample1/sample2/savedQueries/sample3"} # get truthy value for each flattened field mock_args = dict( - name='name_value', + name="name_value", ) mock_args.update(sample_request) @@ -11635,7 +12862,7 @@ def test_get_saved_query_rest_flattened(): # Convert return value to protobuf type return_value = asset_service.SavedQuery.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -11645,10 +12872,12 @@ def test_get_saved_query_rest_flattened(): # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=*/*/savedQueries/*}" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{name=*/*/savedQueries/*}" % client.transport._host, args[1] + ) -def test_get_saved_query_rest_flattened_error(transport: str = 'rest'): +def test_get_saved_query_rest_flattened_error(transport: str = "rest"): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -11659,7 +12888,7 @@ def test_get_saved_query_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.get_saved_query( asset_service.GetSavedQueryRequest(), - name='name_value', + name="name_value", ) @@ -11677,12 +12906,18 @@ def test_list_saved_queries_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_saved_queries in client._transport._wrapped_methods + assert ( + client._transport.list_saved_queries in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_saved_queries] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_saved_queries + ] = mock_rpc request = {} client.list_saved_queries(request) @@ -11697,57 +12932,68 @@ def test_list_saved_queries_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_list_saved_queries_rest_required_fields(request_type=asset_service.ListSavedQueriesRequest): +def test_list_saved_queries_rest_required_fields( + request_type=asset_service.ListSavedQueriesRequest, +): transport_class = transports.AssetServiceRestTransport request_init = {} request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_saved_queries._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_saved_queries._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = 'parent_value' + jsonified_request["parent"] = "parent_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_saved_queries._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_saved_queries._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("filter", "page_size", "page_token", )) + assert not set(unset_fields) - set( + ( + "filter", + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' + assert jsonified_request["parent"] == "parent_value" client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. return_value = asset_service.ListSavedQueriesResponse() # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, } transcode.return_value = transcode_result @@ -11758,23 +13004,33 @@ def test_list_saved_queries_rest_required_fields(request_type=asset_service.List return_value = asset_service.ListSavedQueriesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.list_saved_queries(request) - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_list_saved_queries_rest_unset_required_fields(): - transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.AssetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.list_saved_queries._get_unset_required_fields({}) - assert set(unset_fields) == (set(("filter", "pageSize", "pageToken", )) & set(("parent", ))) + assert set(unset_fields) == ( + set( + ( + "filter", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) def test_list_saved_queries_rest_flattened(): @@ -11784,16 +13040,16 @@ def test_list_saved_queries_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
return_value = asset_service.ListSavedQueriesResponse() # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'sample1/sample2'} + sample_request = {"parent": "sample1/sample2"} # get truthy value for each flattened field mock_args = dict( - parent='parent_value', + parent="parent_value", ) mock_args.update(sample_request) @@ -11803,7 +13059,7 @@ def test_list_saved_queries_rest_flattened(): # Convert return value to protobuf type return_value = asset_service.ListSavedQueriesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -11813,10 +13069,12 @@ def test_list_saved_queries_rest_flattened(): # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=*/*}/savedQueries" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{parent=*/*}/savedQueries" % client.transport._host, args[1] + ) -def test_list_saved_queries_rest_flattened_error(transport: str = 'rest'): +def test_list_saved_queries_rest_flattened_error(transport: str = "rest"): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -11827,20 +13085,20 @@ def test_list_saved_queries_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.list_saved_queries( asset_service.ListSavedQueriesRequest(), - parent='parent_value', + parent="parent_value", ) -def test_list_saved_queries_rest_pager(transport: str = 'rest'): +def test_list_saved_queries_rest_pager(transport: str = "rest"): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # TODO(kbandes): remove this mock unless there's a good reason for it. 
- #with mock.patch.object(path_template, 'transcode') as transcode: + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( asset_service.ListSavedQueriesResponse( @@ -11849,17 +13107,17 @@ def test_list_saved_queries_rest_pager(transport: str = 'rest'): asset_service.SavedQuery(), asset_service.SavedQuery(), ], - next_page_token='abc', + next_page_token="abc", ), asset_service.ListSavedQueriesResponse( saved_queries=[], - next_page_token='def', + next_page_token="def", ), asset_service.ListSavedQueriesResponse( saved_queries=[ asset_service.SavedQuery(), ], - next_page_token='ghi', + next_page_token="ghi", ), asset_service.ListSavedQueriesResponse( saved_queries=[ @@ -11872,24 +13130,25 @@ def test_list_saved_queries_rest_pager(transport: str = 'rest'): response = response + response # Wrap the values into proper Response objs - response = tuple(asset_service.ListSavedQueriesResponse.to_json(x) for x in response) + response = tuple( + asset_service.ListSavedQueriesResponse.to_json(x) for x in response + ) return_values = tuple(Response() for i in response) for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') + return_val._content = response_val.encode("UTF-8") return_val.status_code = 200 req.side_effect = return_values - sample_request = {'parent': 'sample1/sample2'} + sample_request = {"parent": "sample1/sample2"} pager = client.list_saved_queries(request=sample_request) results = list(pager) assert len(results) == 6 - assert all(isinstance(i, asset_service.SavedQuery) - for i in results) + assert all(isinstance(i, asset_service.SavedQuery) for i in results) pages = list(client.list_saved_queries(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -11907,12 +13166,18 @@ def test_update_saved_query_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.update_saved_query in client._transport._wrapped_methods + assert ( + client._transport.update_saved_query in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_saved_query] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.update_saved_query + ] = mock_rpc request = {} client.update_saved_query(request) @@ -11927,54 +13192,59 @@ def test_update_saved_query_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_update_saved_query_rest_required_fields(request_type=asset_service.UpdateSavedQueryRequest): +def test_update_saved_query_rest_required_fields( + request_type=asset_service.UpdateSavedQueryRequest, +): transport_class = transports.AssetServiceRestTransport request_init = {} request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_saved_query._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_saved_query._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_saved_query._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_saved_query._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("update_mask", )) + assert not set(unset_fields) - set(("update_mask",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. return_value = asset_service.SavedQuery() # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. 
pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "patch", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, } - transcode_result['body'] = pb_request + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -11984,23 +13254,32 @@ def test_update_saved_query_rest_required_fields(request_type=asset_service.Upda return_value = asset_service.SavedQuery.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.update_saved_query(request) - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_update_saved_query_rest_unset_required_fields(): - transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.AssetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.update_saved_query._get_unset_required_fields({}) - assert set(unset_fields) == (set(("updateMask", )) & set(("savedQuery", "updateMask", ))) + assert set(unset_fields) == ( + set(("updateMask",)) + & set( + ( + "savedQuery", + "updateMask", + ) + ) + ) def test_update_saved_query_rest_flattened(): @@ -12010,17 +13289,19 @@ def test_update_saved_query_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = asset_service.SavedQuery() # get arguments that satisfy an http rule for this method - sample_request = {'saved_query': {'name': 'sample1/sample2/savedQueries/sample3'}} + sample_request = { + "saved_query": {"name": "sample1/sample2/savedQueries/sample3"} + } # get truthy value for each flattened field mock_args = dict( - saved_query=asset_service.SavedQuery(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + saved_query=asset_service.SavedQuery(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) @@ -12030,7 +13311,7 @@ def test_update_saved_query_rest_flattened(): # Convert return value to protobuf type return_value = asset_service.SavedQuery.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -12040,10 +13321,13 @@ def test_update_saved_query_rest_flattened(): # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{saved_query.name=*/*/savedQueries/*}" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{saved_query.name=*/*/savedQueries/*}" % client.transport._host, + args[1], + ) -def test_update_saved_query_rest_flattened_error(transport: str = 'rest'): +def test_update_saved_query_rest_flattened_error(transport: str = "rest"): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -12054,8 +13338,8 @@ def test_update_saved_query_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.update_saved_query( asset_service.UpdateSavedQueryRequest(), - saved_query=asset_service.SavedQuery(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + saved_query=asset_service.SavedQuery(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @@ -12073,12 +13357,18 @@ def test_delete_saved_query_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.delete_saved_query in client._transport._wrapped_methods + assert ( + client._transport.delete_saved_query in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_saved_query] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_saved_query + ] = mock_rpc request = {} client.delete_saved_query(request) @@ -12093,79 +13383,85 @@ def test_delete_saved_query_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_delete_saved_query_rest_required_fields(request_type=asset_service.DeleteSavedQueryRequest): +def test_delete_saved_query_rest_required_fields( + request_type=asset_service.DeleteSavedQueryRequest, +): transport_class = transports.AssetServiceRestTransport request_init = {} request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_saved_query._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_saved_query._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = 'name_value' + jsonified_request["name"] = "name_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_saved_query._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_saved_query._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "name" in jsonified_request - assert 
jsonified_request["name"] == 'name_value' + assert jsonified_request["name"] == "name_value" client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. return_value = None # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, } transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - json_return_value = '' + json_return_value = "" - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.delete_saved_query(request) - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_delete_saved_query_rest_unset_required_fields(): - transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.AssetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.delete_saved_query._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) + assert set(unset_fields) == (set(()) & set(("name",))) def test_delete_saved_query_rest_flattened(): @@ -12175,24 +13471,24 @@ def test_delete_saved_query_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = None # get arguments that satisfy an http rule for this method - sample_request = {'name': 'sample1/sample2/savedQueries/sample3'} + sample_request = {"name": "sample1/sample2/savedQueries/sample3"} # get truthy value for each flattened field mock_args = dict( - name='name_value', + name="name_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = '' - response_value._content = json_return_value.encode('UTF-8') + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -12202,10 +13498,12 @@ def test_delete_saved_query_rest_flattened(): # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=*/*/savedQueries/*}" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{name=*/*/savedQueries/*}" % client.transport._host, args[1] + ) -def test_delete_saved_query_rest_flattened_error(transport: str = 'rest'): +def test_delete_saved_query_rest_flattened_error(transport: str = "rest"): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -12216,7 +13514,7 @@ def test_delete_saved_query_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.delete_saved_query( asset_service.DeleteSavedQueryRequest(), - name='name_value', + name="name_value", ) @@ -12234,12 +13532,19 @@ def test_batch_get_effective_iam_policies_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.batch_get_effective_iam_policies in client._transport._wrapped_methods + assert ( + client._transport.batch_get_effective_iam_policies + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.batch_get_effective_iam_policies] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.batch_get_effective_iam_policies + ] = mock_rpc request = {} client.batch_get_effective_iam_policies(request) @@ -12254,7 +13559,9 @@ def test_batch_get_effective_iam_policies_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_batch_get_effective_iam_policies_rest_required_fields(request_type=asset_service.BatchGetEffectiveIamPoliciesRequest): +def test_batch_get_effective_iam_policies_rest_required_fields( + request_type=asset_service.BatchGetEffectiveIamPoliciesRequest, +): transport_class = transports.AssetServiceRestTransport request_init = {} @@ -12262,56 +13569,59 @@ def test_batch_get_effective_iam_policies_rest_required_fields(request_type=asse request_init["names"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped assert "names" not in jsonified_request - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).batch_get_effective_iam_policies._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).batch_get_effective_iam_policies._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present assert "names" in jsonified_request assert jsonified_request["names"] == request_init["names"] - jsonified_request["scope"] = 'scope_value' - jsonified_request["names"] = 'names_value' + jsonified_request["scope"] = "scope_value" + jsonified_request["names"] = "names_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).batch_get_effective_iam_policies._get_unset_required_fields(jsonified_request) + unset_fields = 
transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).batch_get_effective_iam_policies._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("names", )) + assert not set(unset_fields) - set(("names",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "scope" in jsonified_request - assert jsonified_request["scope"] == 'scope_value' + assert jsonified_request["scope"] == "scope_value" assert "names" in jsonified_request - assert jsonified_request["names"] == 'names_value' + assert jsonified_request["names"] == "names_value" client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. return_value = asset_service.BatchGetEffectiveIamPoliciesResponse() # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, } transcode.return_value = transcode_result @@ -12319,10 +13629,12 @@ def test_batch_get_effective_iam_policies_rest_required_fields(request_type=asse response_value.status_code = 200 # Convert return value to protobuf type - return_value = asset_service.BatchGetEffectiveIamPoliciesResponse.pb(return_value) + return_value = asset_service.BatchGetEffectiveIamPoliciesResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -12334,15 +13646,27 @@ def test_batch_get_effective_iam_policies_rest_required_fields(request_type=asse "", ), ] - actual_params = req.call_args.kwargs['params'] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_batch_get_effective_iam_policies_rest_unset_required_fields(): - transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.AssetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) - unset_fields = transport.batch_get_effective_iam_policies._get_unset_required_fields({}) - assert set(unset_fields) == (set(("names", )) & set(("scope", "names", ))) + unset_fields = ( + transport.batch_get_effective_iam_policies._get_unset_required_fields({}) + ) + assert set(unset_fields) == ( + set(("names",)) + & set( + ( + "scope", + "names", + ) + ) + ) def test_analyze_org_policies_rest_use_cached_wrapped_rpc(): @@ -12359,12 +13683,18 @@ def test_analyze_org_policies_rest_use_cached_wrapped_rpc(): 
wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.analyze_org_policies in client._transport._wrapped_methods + assert ( + client._transport.analyze_org_policies in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.analyze_org_policies] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.analyze_org_policies + ] = mock_rpc request = {} client.analyze_org_policies(request) @@ -12379,7 +13709,9 @@ def test_analyze_org_policies_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_analyze_org_policies_rest_required_fields(request_type=asset_service.AnalyzeOrgPoliciesRequest): +def test_analyze_org_policies_rest_required_fields( + request_type=asset_service.AnalyzeOrgPoliciesRequest, +): transport_class = transports.AssetServiceRestTransport request_init = {} @@ -12387,56 +13719,66 @@ def test_analyze_org_policies_rest_required_fields(request_type=asset_service.An request_init["constraint"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped assert "constraint" not in jsonified_request - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_org_policies._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).analyze_org_policies._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present assert "constraint" in jsonified_request assert jsonified_request["constraint"] == request_init["constraint"] - jsonified_request["scope"] = 'scope_value' - jsonified_request["constraint"] = 'constraint_value' + jsonified_request["scope"] = "scope_value" + jsonified_request["constraint"] = "constraint_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_org_policies._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).analyze_org_policies._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("constraint", "filter", "page_size", "page_token", )) + assert not set(unset_fields) - set( + ( + "constraint", + "filter", + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "scope" in jsonified_request - assert jsonified_request["scope"] == 'scope_value' + assert jsonified_request["scope"] == "scope_value" assert "constraint" in jsonified_request - assert jsonified_request["constraint"] == 'constraint_value' + assert jsonified_request["constraint"] == "constraint_value" client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. return_value = asset_service.AnalyzeOrgPoliciesResponse() # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, } transcode.return_value = transcode_result @@ -12447,7 +13789,7 @@ def test_analyze_org_policies_rest_required_fields(request_type=asset_service.An return_value = asset_service.AnalyzeOrgPoliciesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -12459,15 +13801,32 @@ def test_analyze_org_policies_rest_required_fields(request_type=asset_service.An "", ), ] - actual_params = req.call_args.kwargs['params'] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_analyze_org_policies_rest_unset_required_fields(): - transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.AssetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.analyze_org_policies._get_unset_required_fields({}) - assert set(unset_fields) == (set(("constraint", "filter", "pageSize", "pageToken", )) & set(("scope", "constraint", ))) + assert set(unset_fields) == ( + set( + ( + "constraint", + "filter", + "pageSize", + "pageToken", + ) + ) + & set( + ( + "scope", + "constraint", + ) + ) + ) def test_analyze_org_policies_rest_flattened(): @@ -12477,18 +13836,18 @@ def test_analyze_org_policies_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
return_value = asset_service.AnalyzeOrgPoliciesResponse() # get arguments that satisfy an http rule for this method - sample_request = {'scope': 'sample1/sample2'} + sample_request = {"scope": "sample1/sample2"} # get truthy value for each flattened field mock_args = dict( - scope='scope_value', - constraint='constraint_value', - filter='filter_value', + scope="scope_value", + constraint="constraint_value", + filter="filter_value", ) mock_args.update(sample_request) @@ -12498,7 +13857,7 @@ def test_analyze_org_policies_rest_flattened(): # Convert return value to protobuf type return_value = asset_service.AnalyzeOrgPoliciesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -12508,10 +13867,12 @@ def test_analyze_org_policies_rest_flattened(): # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{scope=*/*}:analyzeOrgPolicies" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{scope=*/*}:analyzeOrgPolicies" % client.transport._host, args[1] + ) -def test_analyze_org_policies_rest_flattened_error(transport: str = 'rest'): +def test_analyze_org_policies_rest_flattened_error(transport: str = "rest"): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -12522,22 +13883,22 @@ def test_analyze_org_policies_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.analyze_org_policies( asset_service.AnalyzeOrgPoliciesRequest(), - scope='scope_value', - constraint='constraint_value', - filter='filter_value', + scope="scope_value", + constraint="constraint_value", + filter="filter_value", ) -def test_analyze_org_policies_rest_pager(transport: str = 'rest'): +def test_analyze_org_policies_rest_pager(transport: str = "rest"): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # TODO(kbandes): remove this mock unless there's a good reason for it. 
- #with mock.patch.object(path_template, 'transcode') as transcode: + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( asset_service.AnalyzeOrgPoliciesResponse( @@ -12546,17 +13907,17 @@ def test_analyze_org_policies_rest_pager(transport: str = 'rest'): asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), ], - next_page_token='abc', + next_page_token="abc", ), asset_service.AnalyzeOrgPoliciesResponse( org_policy_results=[], - next_page_token='def', + next_page_token="def", ), asset_service.AnalyzeOrgPoliciesResponse( org_policy_results=[ asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), ], - next_page_token='ghi', + next_page_token="ghi", ), asset_service.AnalyzeOrgPoliciesResponse( org_policy_results=[ @@ -12569,24 +13930,28 @@ def test_analyze_org_policies_rest_pager(transport: str = 'rest'): response = response + response # Wrap the values into proper Response objs - response = tuple(asset_service.AnalyzeOrgPoliciesResponse.to_json(x) for x in response) + response = tuple( + asset_service.AnalyzeOrgPoliciesResponse.to_json(x) for x in response + ) return_values = tuple(Response() for i in response) for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') + return_val._content = response_val.encode("UTF-8") return_val.status_code = 200 req.side_effect = return_values - sample_request = {'scope': 'sample1/sample2'} + sample_request = {"scope": "sample1/sample2"} pager = client.analyze_org_policies(request=sample_request) results = list(pager) assert len(results) == 6 - assert all(isinstance(i, asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult) - for i in results) + assert all( + isinstance(i, asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult) + for i in results + ) pages = list(client.analyze_org_policies(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -12604,12 +13969,19 @@ def test_analyze_org_policy_governed_containers_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.analyze_org_policy_governed_containers in client._transport._wrapped_methods + assert ( + client._transport.analyze_org_policy_governed_containers + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.analyze_org_policy_governed_containers] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.analyze_org_policy_governed_containers + ] = mock_rpc request = {} client.analyze_org_policy_governed_containers(request) @@ -12624,7 +13996,9 @@ def test_analyze_org_policy_governed_containers_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_analyze_org_policy_governed_containers_rest_required_fields(request_type=asset_service.AnalyzeOrgPolicyGovernedContainersRequest): +def test_analyze_org_policy_governed_containers_rest_required_fields( + request_type=asset_service.AnalyzeOrgPolicyGovernedContainersRequest, +): transport_class = transports.AssetServiceRestTransport request_init = {} @@ -12632,56 +14006,70 @@ def test_analyze_org_policy_governed_containers_rest_required_fields(request_typ request_init["constraint"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped assert "constraint" not in jsonified_request - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_org_policy_governed_containers._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).analyze_org_policy_governed_containers._get_unset_required_fields( + jsonified_request + ) jsonified_request.update(unset_fields) # verify required fields with default values are now present assert "constraint" in jsonified_request assert jsonified_request["constraint"] == request_init["constraint"] - jsonified_request["scope"] = 'scope_value' - jsonified_request["constraint"] = 'constraint_value' + jsonified_request["scope"] = "scope_value" + jsonified_request["constraint"] = "constraint_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_org_policy_governed_containers._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).analyze_org_policy_governed_containers._get_unset_required_fields( + jsonified_request + ) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("constraint", "filter", "page_size", "page_token", )) + assert not set(unset_fields) - set( + ( + "constraint", + "filter", + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "scope" in jsonified_request - assert jsonified_request["scope"] == 'scope_value' + assert jsonified_request["scope"] == "scope_value" assert "constraint" in jsonified_request - assert jsonified_request["constraint"] == 'constraint_value' + assert jsonified_request["constraint"] == "constraint_value" client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. return_value = asset_service.AnalyzeOrgPolicyGovernedContainersResponse() # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, } transcode.return_value = transcode_result @@ -12689,10 +14077,12 @@ def test_analyze_org_policy_governed_containers_rest_required_fields(request_typ response_value.status_code = 200 # Convert return value to protobuf type - return_value = asset_service.AnalyzeOrgPolicyGovernedContainersResponse.pb(return_value) + return_value = asset_service.AnalyzeOrgPolicyGovernedContainersResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -12704,15 +14094,34 @@ def test_analyze_org_policy_governed_containers_rest_required_fields(request_typ "", ), ] - actual_params = req.call_args.kwargs['params'] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_analyze_org_policy_governed_containers_rest_unset_required_fields(): - transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.AssetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) - unset_fields = transport.analyze_org_policy_governed_containers._get_unset_required_fields({}) - assert set(unset_fields) == (set(("constraint", "filter", "pageSize", "pageToken", )) & set(("scope", "constraint", ))) + unset_fields = ( + transport.analyze_org_policy_governed_containers._get_unset_required_fields({}) + ) + assert set(unset_fields) == ( + set( + ( + "constraint", + "filter", + "pageSize", + "pageToken", + ) + ) + & set( + ( + "scope", + "constraint", + ) + ) + ) def test_analyze_org_policy_governed_containers_rest_flattened(): @@ -12722,18 +14131,18 @@ def test_analyze_org_policy_governed_containers_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
return_value = asset_service.AnalyzeOrgPolicyGovernedContainersResponse() # get arguments that satisfy an http rule for this method - sample_request = {'scope': 'sample1/sample2'} + sample_request = {"scope": "sample1/sample2"} # get truthy value for each flattened field mock_args = dict( - scope='scope_value', - constraint='constraint_value', - filter='filter_value', + scope="scope_value", + constraint="constraint_value", + filter="filter_value", ) mock_args.update(sample_request) @@ -12741,9 +14150,11 @@ def test_analyze_org_policy_governed_containers_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = asset_service.AnalyzeOrgPolicyGovernedContainersResponse.pb(return_value) + return_value = asset_service.AnalyzeOrgPolicyGovernedContainersResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -12753,10 +14164,16 @@ def test_analyze_org_policy_governed_containers_rest_flattened(): # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{scope=*/*}:analyzeOrgPolicyGovernedContainers" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{scope=*/*}:analyzeOrgPolicyGovernedContainers" + % client.transport._host, + args[1], + ) -def test_analyze_org_policy_governed_containers_rest_flattened_error(transport: str = 'rest'): +def test_analyze_org_policy_governed_containers_rest_flattened_error( + transport: str = "rest", +): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -12767,22 +14184,22 @@ def test_analyze_org_policy_governed_containers_rest_flattened_error(transport: with pytest.raises(ValueError): client.analyze_org_policy_governed_containers( asset_service.AnalyzeOrgPolicyGovernedContainersRequest(), - scope='scope_value', - constraint='constraint_value', - filter='filter_value', + scope="scope_value", + constraint="constraint_value", + filter="filter_value", ) -def test_analyze_org_policy_governed_containers_rest_pager(transport: str = 'rest'): +def test_analyze_org_policy_governed_containers_rest_pager(transport: str = "rest"): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # TODO(kbandes): remove this mock unless there's a good reason for it. 
- #with mock.patch.object(path_template, 'transcode') as transcode: + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( asset_service.AnalyzeOrgPolicyGovernedContainersResponse( @@ -12791,17 +14208,17 @@ def test_analyze_org_policy_governed_containers_rest_pager(transport: str = 'res asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), ], - next_page_token='abc', + next_page_token="abc", ), asset_service.AnalyzeOrgPolicyGovernedContainersResponse( governed_containers=[], - next_page_token='def', + next_page_token="def", ), asset_service.AnalyzeOrgPolicyGovernedContainersResponse( governed_containers=[ asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), ], - next_page_token='ghi', + next_page_token="ghi", ), asset_service.AnalyzeOrgPolicyGovernedContainersResponse( governed_containers=[ @@ -12814,24 +14231,34 @@ def test_analyze_org_policy_governed_containers_rest_pager(transport: str = 'res response = response + response # Wrap the values into proper Response objs - response = tuple(asset_service.AnalyzeOrgPolicyGovernedContainersResponse.to_json(x) for x in response) + response = tuple( + asset_service.AnalyzeOrgPolicyGovernedContainersResponse.to_json(x) + for x in response + ) return_values = tuple(Response() for i in response) for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') + return_val._content = response_val.encode("UTF-8") return_val.status_code = 200 req.side_effect = return_values - sample_request = {'scope': 'sample1/sample2'} + sample_request = {"scope": "sample1/sample2"} pager = client.analyze_org_policy_governed_containers(request=sample_request) results = list(pager) assert len(results) == 6 - assert all(isinstance(i, asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer) - for i in results) + assert all( + isinstance( + i, + asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer, + ) + for i in results + ) - pages = list(client.analyze_org_policy_governed_containers(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): + pages = list( + client.analyze_org_policy_governed_containers(request=sample_request).pages + ) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -12849,12 +14276,19 @@ def test_analyze_org_policy_governed_assets_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.analyze_org_policy_governed_assets in client._transport._wrapped_methods + assert ( + client._transport.analyze_org_policy_governed_assets + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.analyze_org_policy_governed_assets] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.analyze_org_policy_governed_assets + ] = mock_rpc request = {} client.analyze_org_policy_governed_assets(request) @@ -12869,7 +14303,9 @@ def test_analyze_org_policy_governed_assets_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_analyze_org_policy_governed_assets_rest_required_fields(request_type=asset_service.AnalyzeOrgPolicyGovernedAssetsRequest): +def test_analyze_org_policy_governed_assets_rest_required_fields( + request_type=asset_service.AnalyzeOrgPolicyGovernedAssetsRequest, +): transport_class = transports.AssetServiceRestTransport request_init = {} @@ -12877,56 +14313,66 @@ def test_analyze_org_policy_governed_assets_rest_required_fields(request_type=as request_init["constraint"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped assert "constraint" not in jsonified_request - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_org_policy_governed_assets._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).analyze_org_policy_governed_assets._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present assert "constraint" in jsonified_request assert jsonified_request["constraint"] == request_init["constraint"] - jsonified_request["scope"] = 'scope_value' - jsonified_request["constraint"] = 'constraint_value' + jsonified_request["scope"] = "scope_value" + jsonified_request["constraint"] = "constraint_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_org_policy_governed_assets._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).analyze_org_policy_governed_assets._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("constraint", "filter", "page_size", "page_token", )) + assert not set(unset_fields) - set( + ( + "constraint", + "filter", + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "scope" in jsonified_request - assert jsonified_request["scope"] == 'scope_value' + assert jsonified_request["scope"] == "scope_value" assert "constraint" in jsonified_request - assert jsonified_request["constraint"] == 'constraint_value' + assert jsonified_request["constraint"] == "constraint_value" client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. return_value = asset_service.AnalyzeOrgPolicyGovernedAssetsResponse() # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, } transcode.return_value = transcode_result @@ -12934,10 +14380,12 @@ def test_analyze_org_policy_governed_assets_rest_required_fields(request_type=as response_value.status_code = 200 # Convert return value to protobuf type - return_value = asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.pb(return_value) + return_value = asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -12949,15 +14397,34 @@ def test_analyze_org_policy_governed_assets_rest_required_fields(request_type=as "", ), ] - actual_params = req.call_args.kwargs['params'] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_analyze_org_policy_governed_assets_rest_unset_required_fields(): - transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.AssetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) - unset_fields = transport.analyze_org_policy_governed_assets._get_unset_required_fields({}) - assert set(unset_fields) == (set(("constraint", "filter", "pageSize", "pageToken", )) & set(("scope", "constraint", ))) + unset_fields = ( + transport.analyze_org_policy_governed_assets._get_unset_required_fields({}) + ) + assert set(unset_fields) == ( + set( + ( + "constraint", + "filter", + "pageSize", + "pageToken", + ) + ) + & set( + ( + "scope", + "constraint", + ) + ) + ) def test_analyze_org_policy_governed_assets_rest_flattened(): @@ -12967,18 +14434,18 @@ def test_analyze_org_policy_governed_assets_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
return_value = asset_service.AnalyzeOrgPolicyGovernedAssetsResponse() # get arguments that satisfy an http rule for this method - sample_request = {'scope': 'sample1/sample2'} + sample_request = {"scope": "sample1/sample2"} # get truthy value for each flattened field mock_args = dict( - scope='scope_value', - constraint='constraint_value', - filter='filter_value', + scope="scope_value", + constraint="constraint_value", + filter="filter_value", ) mock_args.update(sample_request) @@ -12986,9 +14453,11 @@ def test_analyze_org_policy_governed_assets_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.pb(return_value) + return_value = asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -12998,10 +14467,15 @@ def test_analyze_org_policy_governed_assets_rest_flattened(): # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{scope=*/*}:analyzeOrgPolicyGovernedAssets" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{scope=*/*}:analyzeOrgPolicyGovernedAssets" % client.transport._host, + args[1], + ) -def test_analyze_org_policy_governed_assets_rest_flattened_error(transport: str = 'rest'): +def test_analyze_org_policy_governed_assets_rest_flattened_error( + transport: str = "rest", +): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -13012,22 +14486,22 @@ def test_analyze_org_policy_governed_assets_rest_flattened_error(transport: str with pytest.raises(ValueError): client.analyze_org_policy_governed_assets( asset_service.AnalyzeOrgPolicyGovernedAssetsRequest(), - scope='scope_value', - constraint='constraint_value', - filter='filter_value', + scope="scope_value", + constraint="constraint_value", + filter="filter_value", ) -def test_analyze_org_policy_governed_assets_rest_pager(transport: str = 'rest'): +def test_analyze_org_policy_governed_assets_rest_pager(transport: str = "rest"): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # TODO(kbandes): remove this mock unless there's a good reason for it. 
- #with mock.patch.object(path_template, 'transcode') as transcode: + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( @@ -13036,17 +14510,17 @@ def test_analyze_org_policy_governed_assets_rest_pager(transport: str = 'rest'): asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), ], - next_page_token='abc', + next_page_token="abc", ), asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( governed_assets=[], - next_page_token='def', + next_page_token="def", ), asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( governed_assets=[ asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), ], - next_page_token='ghi', + next_page_token="ghi", ), asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( governed_assets=[ @@ -13059,24 +14533,33 @@ def test_analyze_org_policy_governed_assets_rest_pager(transport: str = 'rest'): response = response + response # Wrap the values into proper Response objs - response = tuple(asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.to_json(x) for x in response) + response = tuple( + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.to_json(x) + for x in response + ) return_values = tuple(Response() for i in response) for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') + return_val._content = response_val.encode("UTF-8") return_val.status_code = 200 req.side_effect = return_values - sample_request = {'scope': 'sample1/sample2'} + sample_request = {"scope": "sample1/sample2"} pager = client.analyze_org_policy_governed_assets(request=sample_request) results = list(pager) assert len(results) == 6 - assert all(isinstance(i, asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset) - for i in results) + assert all( + isinstance( + i, asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset + ) + for i in results + ) - pages = list(client.analyze_org_policy_governed_assets(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): + pages = list( + client.analyze_org_policy_governed_assets(request=sample_request).pages + ) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -13118,8 +14601,7 @@ def test_credentials_transport_error(): options.api_key = "api_key" with pytest.raises(ValueError): client = AssetServiceClient( - client_options=options, - credentials=ga_credentials.AnonymousCredentials() + client_options=options, credentials=ga_credentials.AnonymousCredentials() ) # It is an error to provide scopes and a transport instance. @@ -13141,6 +14623,7 @@ def test_transport_instance(): client = AssetServiceClient(transport=transport) assert client.transport is transport + def test_transport_get_channel(): # A client may be instantiated with a custom transport instance. 
transport = transports.AssetServiceGrpcTransport( @@ -13155,18 +14638,23 @@ def test_transport_get_channel(): channel = transport.grpc_channel assert channel -@pytest.mark.parametrize("transport_class", [ - transports.AssetServiceGrpcTransport, - transports.AssetServiceGrpcAsyncIOTransport, - transports.AssetServiceRestTransport, -]) + +@pytest.mark.parametrize( + "transport_class", + [ + transports.AssetServiceGrpcTransport, + transports.AssetServiceGrpcAsyncIOTransport, + transports.AssetServiceRestTransport, + ], +) def test_transport_adc(transport_class): # Test default credentials are used if not provided. - with mock.patch.object(google.auth, 'default') as adc: + with mock.patch.object(google.auth, "default") as adc: adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport_class() adc.assert_called_once() + def test_transport_kind_grpc(): transport = AssetServiceClient.get_transport_class("grpc")( credentials=ga_credentials.AnonymousCredentials() @@ -13176,8 +14664,7 @@ def test_transport_kind_grpc(): def test_initialize_client_w_grpc(): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" ) assert client is not None @@ -13191,10 +14678,8 @@ def test_export_assets_empty_call_grpc(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.export_assets), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') + with mock.patch.object(type(client.transport.export_assets), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") client.export_assets(request=None) # Establish that the underlying stub method was called. @@ -13214,9 +14699,7 @@ def test_list_assets_empty_call_grpc(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_assets), - '__call__') as call: + with mock.patch.object(type(client.transport.list_assets), "__call__") as call: call.return_value = asset_service.ListAssetsResponse() client.list_assets(request=None) @@ -13238,8 +14721,8 @@ def test_batch_get_assets_history_empty_call_grpc(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.batch_get_assets_history), - '__call__') as call: + type(client.transport.batch_get_assets_history), "__call__" + ) as call: call.return_value = asset_service.BatchGetAssetsHistoryResponse() client.batch_get_assets_history(request=None) @@ -13260,9 +14743,7 @@ def test_create_feed_empty_call_grpc(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_feed), - '__call__') as call: + with mock.patch.object(type(client.transport.create_feed), "__call__") as call: call.return_value = asset_service.Feed() client.create_feed(request=None) @@ -13283,9 +14764,7 @@ def test_get_feed_empty_call_grpc(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_feed), - '__call__') as call: + with mock.patch.object(type(client.transport.get_feed), "__call__") as call: call.return_value = asset_service.Feed() client.get_feed(request=None) @@ -13306,9 +14785,7 @@ def test_list_feeds_empty_call_grpc(): ) # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.list_feeds), - '__call__') as call: + with mock.patch.object(type(client.transport.list_feeds), "__call__") as call: call.return_value = asset_service.ListFeedsResponse() client.list_feeds(request=None) @@ -13329,9 +14806,7 @@ def test_update_feed_empty_call_grpc(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_feed), - '__call__') as call: + with mock.patch.object(type(client.transport.update_feed), "__call__") as call: call.return_value = asset_service.Feed() client.update_feed(request=None) @@ -13352,9 +14827,7 @@ def test_delete_feed_empty_call_grpc(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_feed), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_feed), "__call__") as call: call.return_value = None client.delete_feed(request=None) @@ -13376,8 +14849,8 @@ def test_search_all_resources_empty_call_grpc(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.search_all_resources), - '__call__') as call: + type(client.transport.search_all_resources), "__call__" + ) as call: call.return_value = asset_service.SearchAllResourcesResponse() client.search_all_resources(request=None) @@ -13399,8 +14872,8 @@ def test_search_all_iam_policies_empty_call_grpc(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.search_all_iam_policies), - '__call__') as call: + type(client.transport.search_all_iam_policies), "__call__" + ) as call: call.return_value = asset_service.SearchAllIamPoliciesResponse() client.search_all_iam_policies(request=None) @@ -13422,8 +14895,8 @@ def test_analyze_iam_policy_empty_call_grpc(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.analyze_iam_policy), - '__call__') as call: + type(client.transport.analyze_iam_policy), "__call__" + ) as call: call.return_value = asset_service.AnalyzeIamPolicyResponse() client.analyze_iam_policy(request=None) @@ -13445,9 +14918,9 @@ def test_analyze_iam_policy_longrunning_empty_call_grpc(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.analyze_iam_policy_longrunning), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') + type(client.transport.analyze_iam_policy_longrunning), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") client.analyze_iam_policy_longrunning(request=None) # Establish that the underlying stub method was called. @@ -13467,9 +14940,7 @@ def test_analyze_move_empty_call_grpc(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.analyze_move), - '__call__') as call: + with mock.patch.object(type(client.transport.analyze_move), "__call__") as call: call.return_value = asset_service.AnalyzeMoveResponse() client.analyze_move(request=None) @@ -13490,9 +14961,7 @@ def test_query_assets_empty_call_grpc(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.query_assets), - '__call__') as call: + with mock.patch.object(type(client.transport.query_assets), "__call__") as call: call.return_value = asset_service.QueryAssetsResponse() client.query_assets(request=None) @@ -13514,8 +14983,8 @@ def test_create_saved_query_empty_call_grpc(): # Mock the actual call, and fake the request. 
with mock.patch.object( - type(client.transport.create_saved_query), - '__call__') as call: + type(client.transport.create_saved_query), "__call__" + ) as call: call.return_value = asset_service.SavedQuery() client.create_saved_query(request=None) @@ -13536,9 +15005,7 @@ def test_get_saved_query_empty_call_grpc(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_saved_query), - '__call__') as call: + with mock.patch.object(type(client.transport.get_saved_query), "__call__") as call: call.return_value = asset_service.SavedQuery() client.get_saved_query(request=None) @@ -13560,8 +15027,8 @@ def test_list_saved_queries_empty_call_grpc(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.list_saved_queries), - '__call__') as call: + type(client.transport.list_saved_queries), "__call__" + ) as call: call.return_value = asset_service.ListSavedQueriesResponse() client.list_saved_queries(request=None) @@ -13583,8 +15050,8 @@ def test_update_saved_query_empty_call_grpc(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.update_saved_query), - '__call__') as call: + type(client.transport.update_saved_query), "__call__" + ) as call: call.return_value = asset_service.SavedQuery() client.update_saved_query(request=None) @@ -13606,8 +15073,8 @@ def test_delete_saved_query_empty_call_grpc(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.delete_saved_query), - '__call__') as call: + type(client.transport.delete_saved_query), "__call__" + ) as call: call.return_value = None client.delete_saved_query(request=None) @@ -13629,8 +15096,8 @@ def test_batch_get_effective_iam_policies_empty_call_grpc(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.batch_get_effective_iam_policies), - '__call__') as call: + type(client.transport.batch_get_effective_iam_policies), "__call__" + ) as call: call.return_value = asset_service.BatchGetEffectiveIamPoliciesResponse() client.batch_get_effective_iam_policies(request=None) @@ -13652,8 +15119,8 @@ def test_analyze_org_policies_empty_call_grpc(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.analyze_org_policies), - '__call__') as call: + type(client.transport.analyze_org_policies), "__call__" + ) as call: call.return_value = asset_service.AnalyzeOrgPoliciesResponse() client.analyze_org_policies(request=None) @@ -13675,8 +15142,8 @@ def test_analyze_org_policy_governed_containers_empty_call_grpc(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.analyze_org_policy_governed_containers), - '__call__') as call: + type(client.transport.analyze_org_policy_governed_containers), "__call__" + ) as call: call.return_value = asset_service.AnalyzeOrgPolicyGovernedContainersResponse() client.analyze_org_policy_governed_containers(request=None) @@ -13698,8 +15165,8 @@ def test_analyze_org_policy_governed_assets_empty_call_grpc(): # Mock the actual call, and fake the request. 
with mock.patch.object( - type(client.transport.analyze_org_policy_governed_assets), - '__call__') as call: + type(client.transport.analyze_org_policy_governed_assets), "__call__" + ) as call: call.return_value = asset_service.AnalyzeOrgPolicyGovernedAssetsResponse() client.analyze_org_policy_governed_assets(request=None) @@ -13720,8 +15187,7 @@ def test_transport_kind_grpc_asyncio(): def test_initialize_client_w_grpc_asyncio(): client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" + credentials=async_anonymous_credentials(), transport="grpc_asyncio" ) assert client is not None @@ -13736,12 +15202,10 @@ async def test_export_assets_empty_call_grpc_asyncio(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.export_assets), - '__call__') as call: + with mock.patch.object(type(client.transport.export_assets), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') + operations_pb2.Operation(name="operations/spam") ) await client.export_assets(request=None) @@ -13763,13 +15227,13 @@ async def test_list_assets_empty_call_grpc_asyncio(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_assets), - '__call__') as call: + with mock.patch.object(type(client.transport.list_assets), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.ListAssetsResponse( - next_page_token='next_page_token_value', - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.ListAssetsResponse( + next_page_token="next_page_token_value", + ) + ) await client.list_assets(request=None) # Establish that the underlying stub method was called. @@ -13791,11 +15255,12 @@ async def test_batch_get_assets_history_empty_call_grpc_asyncio(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.batch_get_assets_history), - '__call__') as call: + type(client.transport.batch_get_assets_history), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.BatchGetAssetsHistoryResponse( - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.BatchGetAssetsHistoryResponse() + ) await client.batch_get_assets_history(request=None) # Establish that the underlying stub method was called. @@ -13816,17 +15281,17 @@ async def test_create_feed_empty_call_grpc_asyncio(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_feed), - '__call__') as call: + with mock.patch.object(type(client.transport.create_feed), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.Feed( - name='name_value', - asset_names=['asset_names_value'], - asset_types=['asset_types_value'], - content_type=asset_service.ContentType.RESOURCE, - relationship_types=['relationship_types_value'], - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.Feed( + name="name_value", + asset_names=["asset_names_value"], + asset_types=["asset_types_value"], + content_type=asset_service.ContentType.RESOURCE, + relationship_types=["relationship_types_value"], + ) + ) await client.create_feed(request=None) # Establish that the underlying stub method was called. @@ -13847,17 +15312,17 @@ async def test_get_feed_empty_call_grpc_asyncio(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_feed), - '__call__') as call: + with mock.patch.object(type(client.transport.get_feed), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.Feed( - name='name_value', - asset_names=['asset_names_value'], - asset_types=['asset_types_value'], - content_type=asset_service.ContentType.RESOURCE, - relationship_types=['relationship_types_value'], - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.Feed( + name="name_value", + asset_names=["asset_names_value"], + asset_types=["asset_types_value"], + content_type=asset_service.ContentType.RESOURCE, + relationship_types=["relationship_types_value"], + ) + ) await client.get_feed(request=None) # Establish that the underlying stub method was called. @@ -13878,12 +15343,11 @@ async def test_list_feeds_empty_call_grpc_asyncio(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_feeds), - '__call__') as call: + with mock.patch.object(type(client.transport.list_feeds), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.ListFeedsResponse( - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.ListFeedsResponse() + ) await client.list_feeds(request=None) # Establish that the underlying stub method was called. @@ -13904,17 +15368,17 @@ async def test_update_feed_empty_call_grpc_asyncio(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_feed), - '__call__') as call: + with mock.patch.object(type(client.transport.update_feed), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.Feed( - name='name_value', - asset_names=['asset_names_value'], - asset_types=['asset_types_value'], - content_type=asset_service.ContentType.RESOURCE, - relationship_types=['relationship_types_value'], - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.Feed( + name="name_value", + asset_names=["asset_names_value"], + asset_types=["asset_types_value"], + content_type=asset_service.ContentType.RESOURCE, + relationship_types=["relationship_types_value"], + ) + ) await client.update_feed(request=None) # Establish that the underlying stub method was called. @@ -13935,9 +15399,7 @@ async def test_delete_feed_empty_call_grpc_asyncio(): ) # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_feed), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_feed), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) await client.delete_feed(request=None) @@ -13961,12 +15423,14 @@ async def test_search_all_resources_empty_call_grpc_asyncio(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.search_all_resources), - '__call__') as call: + type(client.transport.search_all_resources), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SearchAllResourcesResponse( - next_page_token='next_page_token_value', - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.SearchAllResourcesResponse( + next_page_token="next_page_token_value", + ) + ) await client.search_all_resources(request=None) # Establish that the underlying stub method was called. @@ -13988,12 +15452,14 @@ async def test_search_all_iam_policies_empty_call_grpc_asyncio(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.search_all_iam_policies), - '__call__') as call: + type(client.transport.search_all_iam_policies), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SearchAllIamPoliciesResponse( - next_page_token='next_page_token_value', - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.SearchAllIamPoliciesResponse( + next_page_token="next_page_token_value", + ) + ) await client.search_all_iam_policies(request=None) # Establish that the underlying stub method was called. @@ -14015,12 +15481,14 @@ async def test_analyze_iam_policy_empty_call_grpc_asyncio(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.analyze_iam_policy), - '__call__') as call: + type(client.transport.analyze_iam_policy), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.AnalyzeIamPolicyResponse( - fully_explored=True, - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.AnalyzeIamPolicyResponse( + fully_explored=True, + ) + ) await client.analyze_iam_policy(request=None) # Establish that the underlying stub method was called. @@ -14042,11 +15510,11 @@ async def test_analyze_iam_policy_longrunning_empty_call_grpc_asyncio(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.analyze_iam_policy_longrunning), - '__call__') as call: + type(client.transport.analyze_iam_policy_longrunning), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') + operations_pb2.Operation(name="operations/spam") ) await client.analyze_iam_policy_longrunning(request=None) @@ -14068,12 +15536,11 @@ async def test_analyze_move_empty_call_grpc_asyncio(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.analyze_move), - '__call__') as call: + with mock.patch.object(type(client.transport.analyze_move), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.AnalyzeMoveResponse( - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.AnalyzeMoveResponse() + ) await client.analyze_move(request=None) # Establish that the underlying stub method was called. @@ -14094,14 +15561,14 @@ async def test_query_assets_empty_call_grpc_asyncio(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.query_assets), - '__call__') as call: + with mock.patch.object(type(client.transport.query_assets), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.QueryAssetsResponse( - job_reference='job_reference_value', - done=True, - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.QueryAssetsResponse( + job_reference="job_reference_value", + done=True, + ) + ) await client.query_assets(request=None) # Establish that the underlying stub method was called. @@ -14123,15 +15590,17 @@ async def test_create_saved_query_empty_call_grpc_asyncio(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.create_saved_query), - '__call__') as call: + type(client.transport.create_saved_query), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SavedQuery( - name='name_value', - description='description_value', - creator='creator_value', - last_updater='last_updater_value', - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.SavedQuery( + name="name_value", + description="description_value", + creator="creator_value", + last_updater="last_updater_value", + ) + ) await client.create_saved_query(request=None) # Establish that the underlying stub method was called. @@ -14152,16 +15621,16 @@ async def test_get_saved_query_empty_call_grpc_asyncio(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_saved_query), - '__call__') as call: + with mock.patch.object(type(client.transport.get_saved_query), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SavedQuery( - name='name_value', - description='description_value', - creator='creator_value', - last_updater='last_updater_value', - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.SavedQuery( + name="name_value", + description="description_value", + creator="creator_value", + last_updater="last_updater_value", + ) + ) await client.get_saved_query(request=None) # Establish that the underlying stub method was called. @@ -14183,12 +15652,14 @@ async def test_list_saved_queries_empty_call_grpc_asyncio(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.list_saved_queries), - '__call__') as call: + type(client.transport.list_saved_queries), "__call__" + ) as call: # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.ListSavedQueriesResponse( - next_page_token='next_page_token_value', - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.ListSavedQueriesResponse( + next_page_token="next_page_token_value", + ) + ) await client.list_saved_queries(request=None) # Establish that the underlying stub method was called. @@ -14210,15 +15681,17 @@ async def test_update_saved_query_empty_call_grpc_asyncio(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.update_saved_query), - '__call__') as call: + type(client.transport.update_saved_query), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SavedQuery( - name='name_value', - description='description_value', - creator='creator_value', - last_updater='last_updater_value', - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.SavedQuery( + name="name_value", + description="description_value", + creator="creator_value", + last_updater="last_updater_value", + ) + ) await client.update_saved_query(request=None) # Establish that the underlying stub method was called. @@ -14240,8 +15713,8 @@ async def test_delete_saved_query_empty_call_grpc_asyncio(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.delete_saved_query), - '__call__') as call: + type(client.transport.delete_saved_query), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) await client.delete_saved_query(request=None) @@ -14265,11 +15738,12 @@ async def test_batch_get_effective_iam_policies_empty_call_grpc_asyncio(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.batch_get_effective_iam_policies), - '__call__') as call: + type(client.transport.batch_get_effective_iam_policies), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.BatchGetEffectiveIamPoliciesResponse( - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.BatchGetEffectiveIamPoliciesResponse() + ) await client.batch_get_effective_iam_policies(request=None) # Establish that the underlying stub method was called. @@ -14291,12 +15765,14 @@ async def test_analyze_org_policies_empty_call_grpc_asyncio(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.analyze_org_policies), - '__call__') as call: + type(client.transport.analyze_org_policies), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.AnalyzeOrgPoliciesResponse( - next_page_token='next_page_token_value', - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.AnalyzeOrgPoliciesResponse( + next_page_token="next_page_token_value", + ) + ) await client.analyze_org_policies(request=None) # Establish that the underlying stub method was called. @@ -14318,12 +15794,14 @@ async def test_analyze_org_policy_governed_containers_empty_call_grpc_asyncio(): # Mock the actual call, and fake the request. 
with mock.patch.object( - type(client.transport.analyze_org_policy_governed_containers), - '__call__') as call: + type(client.transport.analyze_org_policy_governed_containers), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.AnalyzeOrgPolicyGovernedContainersResponse( - next_page_token='next_page_token_value', - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.AnalyzeOrgPolicyGovernedContainersResponse( + next_page_token="next_page_token_value", + ) + ) await client.analyze_org_policy_governed_containers(request=None) # Establish that the underlying stub method was called. @@ -14345,12 +15823,14 @@ async def test_analyze_org_policy_governed_assets_empty_call_grpc_asyncio(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.analyze_org_policy_governed_assets), - '__call__') as call: + type(client.transport.analyze_org_policy_governed_assets), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( - next_page_token='next_page_token_value', - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( + next_page_token="next_page_token_value", + ) + ) await client.analyze_org_policy_governed_assets(request=None) # Establish that the underlying stub method was called. @@ -14370,18 +15850,19 @@ def test_transport_kind_rest(): def test_export_assets_rest_bad_request(request_type=asset_service.ExportAssetsRequest): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2'} + request_init = {"parent": "sample1/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -14390,30 +15871,32 @@ def test_export_assets_rest_bad_request(request_type=asset_service.ExportAssetsR client.export_assets(request) -@pytest.mark.parametrize("request_type", [ - asset_service.ExportAssetsRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + asset_service.ExportAssetsRequest, + dict, + ], +) def test_export_assets_rest_call_success(request_type): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2'} + request_init = {"parent": "sample1/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.export_assets(request) @@ -14426,20 +15909,31 @@ def test_export_assets_rest_call_success(request_type): def test_export_assets_rest_interceptors(null_interceptor): transport = transports.AssetServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.AssetServiceRestInterceptor(), + ) client = AssetServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_export_assets") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_export_assets_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_export_assets") as pre: + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.AssetServiceRestInterceptor, "post_export_assets" + ) as post, mock.patch.object( + transports.AssetServiceRestInterceptor, "post_export_assets_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.AssetServiceRestInterceptor, "pre_export_assets" + ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = asset_service.ExportAssetsRequest.pb(asset_service.ExportAssetsRequest()) + pb_message = asset_service.ExportAssetsRequest.pb( + asset_service.ExportAssetsRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -14454,7 +15948,7 @@ def test_export_assets_rest_interceptors(null_interceptor): req.return_value.content = return_value request = asset_service.ExportAssetsRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] @@ -14462,7 +15956,13 @@ def test_export_assets_rest_interceptors(null_interceptor): post.return_value = operations_pb2.Operation() post_with_metadata.return_value = operations_pb2.Operation(), metadata - client.export_assets(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.export_assets( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() @@ -14471,18 +15971,19 @@ def test_export_assets_rest_interceptors(null_interceptor): def test_list_assets_rest_bad_request(request_type=asset_service.ListAssetsRequest): client = AssetServiceClient( - 
credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2'} + request_init = {"parent": "sample1/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -14491,25 +15992,27 @@ def test_list_assets_rest_bad_request(request_type=asset_service.ListAssetsReque client.list_assets(request) -@pytest.mark.parametrize("request_type", [ - asset_service.ListAssetsRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + asset_service.ListAssetsRequest, + dict, + ], +) def test_list_assets_rest_call_success(request_type): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2'} + request_init = {"parent": "sample1/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = asset_service.ListAssetsResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", ) # Wrap the value into a proper Response obj @@ -14519,33 +16022,43 @@ def test_list_assets_rest_call_success(request_type): # Convert return value to protobuf type return_value = asset_service.ListAssetsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.list_assets(request) # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListAssetsPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" @pytest.mark.parametrize("null_interceptor", [True, False]) def test_list_assets_rest_interceptors(null_interceptor): transport = transports.AssetServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.AssetServiceRestInterceptor(), + ) client = AssetServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_list_assets") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_list_assets_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_list_assets") as pre: + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.AssetServiceRestInterceptor, "post_list_assets" + ) as post, mock.patch.object( + transports.AssetServiceRestInterceptor, "post_list_assets_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.AssetServiceRestInterceptor, "pre_list_assets" + ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = asset_service.ListAssetsRequest.pb(asset_service.ListAssetsRequest()) + pb_message = asset_service.ListAssetsRequest.pb( + asset_service.ListAssetsRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -14556,11 +16069,13 @@ def test_list_assets_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = asset_service.ListAssetsResponse.to_json(asset_service.ListAssetsResponse()) + return_value = asset_service.ListAssetsResponse.to_json( + asset_service.ListAssetsResponse() + ) req.return_value.content = return_value request = asset_service.ListAssetsRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] @@ -14568,27 +16083,36 @@ def test_list_assets_rest_interceptors(null_interceptor): post.return_value = asset_service.ListAssetsResponse() post_with_metadata.return_value = asset_service.ListAssetsResponse(), metadata - client.list_assets(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.list_assets( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() post_with_metadata.assert_called_once() -def test_batch_get_assets_history_rest_bad_request(request_type=asset_service.BatchGetAssetsHistoryRequest): +def test_batch_get_assets_history_rest_bad_request( + request_type=asset_service.BatchGetAssetsHistoryRequest, +): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2'} + request_init = {"parent": "sample1/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a 
BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -14597,25 +16121,26 @@ def test_batch_get_assets_history_rest_bad_request(request_type=asset_service.Ba client.batch_get_assets_history(request) -@pytest.mark.parametrize("request_type", [ - asset_service.BatchGetAssetsHistoryRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + asset_service.BatchGetAssetsHistoryRequest, + dict, + ], +) def test_batch_get_assets_history_rest_call_success(request_type): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2'} + request_init = {"parent": "sample1/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = asset_service.BatchGetAssetsHistoryResponse( - ) + return_value = asset_service.BatchGetAssetsHistoryResponse() # Wrap the value into a proper Response obj response_value = mock.Mock() @@ -14624,7 +16149,7 @@ def test_batch_get_assets_history_rest_call_success(request_type): # Convert return value to protobuf type return_value = asset_service.BatchGetAssetsHistoryResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.batch_get_assets_history(request) @@ -14637,19 +16162,30 @@ def test_batch_get_assets_history_rest_call_success(request_type): def test_batch_get_assets_history_rest_interceptors(null_interceptor): transport = transports.AssetServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.AssetServiceRestInterceptor(), + ) client = AssetServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_batch_get_assets_history") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_batch_get_assets_history_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_batch_get_assets_history") as pre: + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.AssetServiceRestInterceptor, "post_batch_get_assets_history" + ) as post, mock.patch.object( + 
transports.AssetServiceRestInterceptor, + "post_batch_get_assets_history_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.AssetServiceRestInterceptor, "pre_batch_get_assets_history" + ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = asset_service.BatchGetAssetsHistoryRequest.pb(asset_service.BatchGetAssetsHistoryRequest()) + pb_message = asset_service.BatchGetAssetsHistoryRequest.pb( + asset_service.BatchGetAssetsHistoryRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -14660,19 +16196,30 @@ def test_batch_get_assets_history_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = asset_service.BatchGetAssetsHistoryResponse.to_json(asset_service.BatchGetAssetsHistoryResponse()) + return_value = asset_service.BatchGetAssetsHistoryResponse.to_json( + asset_service.BatchGetAssetsHistoryResponse() + ) req.return_value.content = return_value request = asset_service.BatchGetAssetsHistoryRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata post.return_value = asset_service.BatchGetAssetsHistoryResponse() - post_with_metadata.return_value = asset_service.BatchGetAssetsHistoryResponse(), metadata + post_with_metadata.return_value = ( + asset_service.BatchGetAssetsHistoryResponse(), + metadata, + ) - client.batch_get_assets_history(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.batch_get_assets_history( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() @@ -14681,18 +16228,19 @@ def test_batch_get_assets_history_rest_interceptors(null_interceptor): def test_create_feed_rest_bad_request(request_type=asset_service.CreateFeedRequest): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2'} + request_init = {"parent": "sample1/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -14701,29 +16249,31 @@ def test_create_feed_rest_bad_request(request_type=asset_service.CreateFeedReque client.create_feed(request) -@pytest.mark.parametrize("request_type", [ - asset_service.CreateFeedRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + asset_service.CreateFeedRequest, + dict, + ], +) def test_create_feed_rest_call_success(request_type): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2'} + request_init = {"parent": "sample1/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = asset_service.Feed( - name='name_value', - asset_names=['asset_names_value'], - asset_types=['asset_types_value'], - content_type=asset_service.ContentType.RESOURCE, - relationship_types=['relationship_types_value'], + name="name_value", + asset_names=["asset_names_value"], + asset_types=["asset_types_value"], + content_type=asset_service.ContentType.RESOURCE, + relationship_types=["relationship_types_value"], ) # Wrap the value into a proper Response obj @@ -14733,37 +16283,47 @@ def test_create_feed_rest_call_success(request_type): # Convert return value to protobuf type return_value = asset_service.Feed.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.create_feed(request) # Establish that the response is the type that we expect. 
assert isinstance(response, asset_service.Feed) - assert response.name == 'name_value' - assert response.asset_names == ['asset_names_value'] - assert response.asset_types == ['asset_types_value'] + assert response.name == "name_value" + assert response.asset_names == ["asset_names_value"] + assert response.asset_types == ["asset_types_value"] assert response.content_type == asset_service.ContentType.RESOURCE - assert response.relationship_types == ['relationship_types_value'] + assert response.relationship_types == ["relationship_types_value"] @pytest.mark.parametrize("null_interceptor", [True, False]) def test_create_feed_rest_interceptors(null_interceptor): transport = transports.AssetServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.AssetServiceRestInterceptor(), + ) client = AssetServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_create_feed") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_create_feed_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_create_feed") as pre: + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.AssetServiceRestInterceptor, "post_create_feed" + ) as post, mock.patch.object( + transports.AssetServiceRestInterceptor, "post_create_feed_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.AssetServiceRestInterceptor, "pre_create_feed" + ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = asset_service.CreateFeedRequest.pb(asset_service.CreateFeedRequest()) + pb_message = asset_service.CreateFeedRequest.pb( + asset_service.CreateFeedRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -14778,7 +16338,7 @@ def test_create_feed_rest_interceptors(null_interceptor): req.return_value.content = return_value request = asset_service.CreateFeedRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] @@ -14786,7 +16346,13 @@ def test_create_feed_rest_interceptors(null_interceptor): post.return_value = asset_service.Feed() post_with_metadata.return_value = asset_service.Feed(), metadata - client.create_feed(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.create_feed( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() @@ -14795,18 +16361,19 @@ def test_create_feed_rest_interceptors(null_interceptor): def test_get_feed_rest_bad_request(request_type=asset_service.GetFeedRequest): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'name': 'sample1/sample2/feeds/sample3'} + request_init = {"name": "sample1/sample2/feeds/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -14815,29 +16382,31 @@ def test_get_feed_rest_bad_request(request_type=asset_service.GetFeedRequest): client.get_feed(request) -@pytest.mark.parametrize("request_type", [ - asset_service.GetFeedRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + asset_service.GetFeedRequest, + dict, + ], +) def test_get_feed_rest_call_success(request_type): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'name': 'sample1/sample2/feeds/sample3'} + request_init = {"name": "sample1/sample2/feeds/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = asset_service.Feed( - name='name_value', - asset_names=['asset_names_value'], - asset_types=['asset_types_value'], - content_type=asset_service.ContentType.RESOURCE, - relationship_types=['relationship_types_value'], + name="name_value", + asset_names=["asset_names_value"], + asset_types=["asset_types_value"], + content_type=asset_service.ContentType.RESOURCE, + relationship_types=["relationship_types_value"], ) # Wrap the value into a proper Response obj @@ -14847,33 +16416,41 @@ def test_get_feed_rest_call_success(request_type): # Convert return value to protobuf type return_value = asset_service.Feed.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.get_feed(request) # Establish that the response is the type that we expect. 
assert isinstance(response, asset_service.Feed) - assert response.name == 'name_value' - assert response.asset_names == ['asset_names_value'] - assert response.asset_types == ['asset_types_value'] + assert response.name == "name_value" + assert response.asset_names == ["asset_names_value"] + assert response.asset_types == ["asset_types_value"] assert response.content_type == asset_service.ContentType.RESOURCE - assert response.relationship_types == ['relationship_types_value'] + assert response.relationship_types == ["relationship_types_value"] @pytest.mark.parametrize("null_interceptor", [True, False]) def test_get_feed_rest_interceptors(null_interceptor): transport = transports.AssetServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.AssetServiceRestInterceptor(), + ) client = AssetServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_get_feed") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_get_feed_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_get_feed") as pre: + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.AssetServiceRestInterceptor, "post_get_feed" + ) as post, mock.patch.object( + transports.AssetServiceRestInterceptor, "post_get_feed_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.AssetServiceRestInterceptor, "pre_get_feed" + ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() @@ -14892,7 +16469,7 @@ def test_get_feed_rest_interceptors(null_interceptor): req.return_value.content = return_value request = asset_service.GetFeedRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] @@ -14900,7 +16477,13 @@ def test_get_feed_rest_interceptors(null_interceptor): post.return_value = asset_service.Feed() post_with_metadata.return_value = asset_service.Feed(), metadata - client.get_feed(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.get_feed( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() @@ -14909,18 +16492,19 @@ def test_get_feed_rest_interceptors(null_interceptor): def test_list_feeds_rest_bad_request(request_type=asset_service.ListFeedsRequest): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2'} + request_init = {"parent": "sample1/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -14929,25 +16513,26 @@ def test_list_feeds_rest_bad_request(request_type=asset_service.ListFeedsRequest client.list_feeds(request) -@pytest.mark.parametrize("request_type", [ - asset_service.ListFeedsRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + asset_service.ListFeedsRequest, + dict, + ], +) def test_list_feeds_rest_call_success(request_type): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2'} + request_init = {"parent": "sample1/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = asset_service.ListFeedsResponse( - ) + return_value = asset_service.ListFeedsResponse() # Wrap the value into a proper Response obj response_value = mock.Mock() @@ -14956,7 +16541,7 @@ def test_list_feeds_rest_call_success(request_type): # Convert return value to protobuf type return_value = asset_service.ListFeedsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.list_feeds(request) @@ -14969,15 +16554,23 @@ def test_list_feeds_rest_call_success(request_type): def test_list_feeds_rest_interceptors(null_interceptor): transport = transports.AssetServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.AssetServiceRestInterceptor(), + ) client = AssetServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_list_feeds") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_list_feeds_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_list_feeds") as pre: + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.AssetServiceRestInterceptor, "post_list_feeds" + ) as post, mock.patch.object( + transports.AssetServiceRestInterceptor, "post_list_feeds_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.AssetServiceRestInterceptor, "pre_list_feeds" + ) as pre: pre.assert_not_called() post.assert_not_called() 
post_with_metadata.assert_not_called() @@ -14992,11 +16585,13 @@ def test_list_feeds_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = asset_service.ListFeedsResponse.to_json(asset_service.ListFeedsResponse()) + return_value = asset_service.ListFeedsResponse.to_json( + asset_service.ListFeedsResponse() + ) req.return_value.content = return_value request = asset_service.ListFeedsRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] @@ -15004,7 +16599,13 @@ def test_list_feeds_rest_interceptors(null_interceptor): post.return_value = asset_service.ListFeedsResponse() post_with_metadata.return_value = asset_service.ListFeedsResponse(), metadata - client.list_feeds(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.list_feeds( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() @@ -15013,18 +16614,19 @@ def test_list_feeds_rest_interceptors(null_interceptor): def test_update_feed_rest_bad_request(request_type=asset_service.UpdateFeedRequest): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'feed': {'name': 'sample1/sample2/feeds/sample3'}} + request_init = {"feed": {"name": "sample1/sample2/feeds/sample3"}} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -15033,29 +16635,31 @@ def test_update_feed_rest_bad_request(request_type=asset_service.UpdateFeedReque client.update_feed(request) -@pytest.mark.parametrize("request_type", [ - asset_service.UpdateFeedRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + asset_service.UpdateFeedRequest, + dict, + ], +) def test_update_feed_rest_call_success(request_type): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'feed': {'name': 'sample1/sample2/feeds/sample3'}} + request_init = {"feed": {"name": "sample1/sample2/feeds/sample3"}} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
return_value = asset_service.Feed( - name='name_value', - asset_names=['asset_names_value'], - asset_types=['asset_types_value'], - content_type=asset_service.ContentType.RESOURCE, - relationship_types=['relationship_types_value'], + name="name_value", + asset_names=["asset_names_value"], + asset_types=["asset_types_value"], + content_type=asset_service.ContentType.RESOURCE, + relationship_types=["relationship_types_value"], ) # Wrap the value into a proper Response obj @@ -15065,37 +16669,47 @@ def test_update_feed_rest_call_success(request_type): # Convert return value to protobuf type return_value = asset_service.Feed.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.update_feed(request) # Establish that the response is the type that we expect. assert isinstance(response, asset_service.Feed) - assert response.name == 'name_value' - assert response.asset_names == ['asset_names_value'] - assert response.asset_types == ['asset_types_value'] + assert response.name == "name_value" + assert response.asset_names == ["asset_names_value"] + assert response.asset_types == ["asset_types_value"] assert response.content_type == asset_service.ContentType.RESOURCE - assert response.relationship_types == ['relationship_types_value'] + assert response.relationship_types == ["relationship_types_value"] @pytest.mark.parametrize("null_interceptor", [True, False]) def test_update_feed_rest_interceptors(null_interceptor): transport = transports.AssetServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.AssetServiceRestInterceptor(), + ) client = AssetServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_update_feed") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_update_feed_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_update_feed") as pre: + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.AssetServiceRestInterceptor, "post_update_feed" + ) as post, mock.patch.object( + transports.AssetServiceRestInterceptor, "post_update_feed_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.AssetServiceRestInterceptor, "pre_update_feed" + ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = asset_service.UpdateFeedRequest.pb(asset_service.UpdateFeedRequest()) + pb_message = asset_service.UpdateFeedRequest.pb( + asset_service.UpdateFeedRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -15110,7 +16724,7 @@ def test_update_feed_rest_interceptors(null_interceptor): req.return_value.content = return_value request = asset_service.UpdateFeedRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] @@ -15118,7 +16732,13 @@ def 
test_update_feed_rest_interceptors(null_interceptor): post.return_value = asset_service.Feed() post_with_metadata.return_value = asset_service.Feed(), metadata - client.update_feed(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.update_feed( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() @@ -15127,18 +16747,19 @@ def test_update_feed_rest_interceptors(null_interceptor): def test_delete_feed_rest_bad_request(request_type=asset_service.DeleteFeedRequest): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'name': 'sample1/sample2/feeds/sample3'} + request_init = {"name": "sample1/sample2/feeds/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -15147,30 +16768,32 @@ def test_delete_feed_rest_bad_request(request_type=asset_service.DeleteFeedReque client.delete_feed(request) -@pytest.mark.parametrize("request_type", [ - asset_service.DeleteFeedRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + asset_service.DeleteFeedRequest, + dict, + ], +) def test_delete_feed_rest_call_success(request_type): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'name': 'sample1/sample2/feeds/sample3'} + request_init = {"name": "sample1/sample2/feeds/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
return_value = None # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 - json_return_value = '' - response_value.content = json_return_value.encode('UTF-8') + json_return_value = "" + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.delete_feed(request) @@ -15183,15 +16806,23 @@ def test_delete_feed_rest_call_success(request_type): def test_delete_feed_rest_interceptors(null_interceptor): transport = transports.AssetServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.AssetServiceRestInterceptor(), + ) client = AssetServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_delete_feed") as pre: + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.AssetServiceRestInterceptor, "pre_delete_feed" + ) as pre: pre.assert_not_called() - pb_message = asset_service.DeleteFeedRequest.pb(asset_service.DeleteFeedRequest()) + pb_message = asset_service.DeleteFeedRequest.pb( + asset_service.DeleteFeedRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -15204,31 +16835,40 @@ def test_delete_feed_rest_interceptors(null_interceptor): req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} request = asset_service.DeleteFeedRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - client.delete_feed(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.delete_feed( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() -def test_search_all_resources_rest_bad_request(request_type=asset_service.SearchAllResourcesRequest): +def test_search_all_resources_rest_bad_request( + request_type=asset_service.SearchAllResourcesRequest, +): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'scope': 'sample1/sample2'} + request_init = {"scope": "sample1/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -15237,25 +16877,27 @@ def test_search_all_resources_rest_bad_request(request_type=asset_service.Search client.search_all_resources(request) -@pytest.mark.parametrize("request_type", [ - asset_service.SearchAllResourcesRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + asset_service.SearchAllResourcesRequest, + dict, + ], +) def test_search_all_resources_rest_call_success(request_type): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'scope': 'sample1/sample2'} + request_init = {"scope": "sample1/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = asset_service.SearchAllResourcesResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", ) # Wrap the value into a proper Response obj @@ -15265,33 +16907,44 @@ def test_search_all_resources_rest_call_success(request_type): # Convert return value to protobuf type return_value = asset_service.SearchAllResourcesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.search_all_resources(request) # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.SearchAllResourcesPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" @pytest.mark.parametrize("null_interceptor", [True, False]) def test_search_all_resources_rest_interceptors(null_interceptor): transport = transports.AssetServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.AssetServiceRestInterceptor(), + ) client = AssetServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_search_all_resources") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_search_all_resources_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_search_all_resources") as pre: + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.AssetServiceRestInterceptor, "post_search_all_resources" + ) as post, mock.patch.object( + transports.AssetServiceRestInterceptor, + "post_search_all_resources_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.AssetServiceRestInterceptor, "pre_search_all_resources" + ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = asset_service.SearchAllResourcesRequest.pb(asset_service.SearchAllResourcesRequest()) + pb_message = asset_service.SearchAllResourcesRequest.pb( + asset_service.SearchAllResourcesRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -15302,39 +16955,53 @@ def test_search_all_resources_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = asset_service.SearchAllResourcesResponse.to_json(asset_service.SearchAllResourcesResponse()) + return_value = asset_service.SearchAllResourcesResponse.to_json( + asset_service.SearchAllResourcesResponse() + ) req.return_value.content = return_value request = asset_service.SearchAllResourcesRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata post.return_value = asset_service.SearchAllResourcesResponse() - post_with_metadata.return_value = asset_service.SearchAllResourcesResponse(), metadata + post_with_metadata.return_value = ( + asset_service.SearchAllResourcesResponse(), + metadata, + ) - client.search_all_resources(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.search_all_resources( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() post_with_metadata.assert_called_once() -def test_search_all_iam_policies_rest_bad_request(request_type=asset_service.SearchAllIamPoliciesRequest): +def test_search_all_iam_policies_rest_bad_request( + request_type=asset_service.SearchAllIamPoliciesRequest, +): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # 
send a request that will satisfy transcoding - request_init = {'scope': 'sample1/sample2'} + request_init = {"scope": "sample1/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -15343,25 +17010,27 @@ def test_search_all_iam_policies_rest_bad_request(request_type=asset_service.Sea client.search_all_iam_policies(request) -@pytest.mark.parametrize("request_type", [ - asset_service.SearchAllIamPoliciesRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + asset_service.SearchAllIamPoliciesRequest, + dict, + ], +) def test_search_all_iam_policies_rest_call_success(request_type): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'scope': 'sample1/sample2'} + request_init = {"scope": "sample1/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = asset_service.SearchAllIamPoliciesResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", ) # Wrap the value into a proper Response obj @@ -15371,33 +17040,44 @@ def test_search_all_iam_policies_rest_call_success(request_type): # Convert return value to protobuf type return_value = asset_service.SearchAllIamPoliciesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.search_all_iam_policies(request) # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.SearchAllIamPoliciesPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" @pytest.mark.parametrize("null_interceptor", [True, False]) def test_search_all_iam_policies_rest_interceptors(null_interceptor): transport = transports.AssetServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.AssetServiceRestInterceptor(), + ) client = AssetServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_search_all_iam_policies") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_search_all_iam_policies_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_search_all_iam_policies") as pre: + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.AssetServiceRestInterceptor, "post_search_all_iam_policies" + ) as post, mock.patch.object( + transports.AssetServiceRestInterceptor, + "post_search_all_iam_policies_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.AssetServiceRestInterceptor, "pre_search_all_iam_policies" + ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = asset_service.SearchAllIamPoliciesRequest.pb(asset_service.SearchAllIamPoliciesRequest()) + pb_message = asset_service.SearchAllIamPoliciesRequest.pb( + asset_service.SearchAllIamPoliciesRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -15408,39 +17088,53 @@ def test_search_all_iam_policies_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = asset_service.SearchAllIamPoliciesResponse.to_json(asset_service.SearchAllIamPoliciesResponse()) + return_value = asset_service.SearchAllIamPoliciesResponse.to_json( + asset_service.SearchAllIamPoliciesResponse() + ) req.return_value.content = return_value request = asset_service.SearchAllIamPoliciesRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata post.return_value = asset_service.SearchAllIamPoliciesResponse() - post_with_metadata.return_value = asset_service.SearchAllIamPoliciesResponse(), metadata + post_with_metadata.return_value = ( + asset_service.SearchAllIamPoliciesResponse(), + metadata, + ) - client.search_all_iam_policies(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.search_all_iam_policies( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() post_with_metadata.assert_called_once() -def test_analyze_iam_policy_rest_bad_request(request_type=asset_service.AnalyzeIamPolicyRequest): +def test_analyze_iam_policy_rest_bad_request( + request_type=asset_service.AnalyzeIamPolicyRequest, +): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + 
credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'analysis_query': {'scope': 'sample1/sample2'}} + request_init = {"analysis_query": {"scope": "sample1/sample2"}} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -15449,25 +17143,27 @@ def test_analyze_iam_policy_rest_bad_request(request_type=asset_service.AnalyzeI client.analyze_iam_policy(request) -@pytest.mark.parametrize("request_type", [ - asset_service.AnalyzeIamPolicyRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + asset_service.AnalyzeIamPolicyRequest, + dict, + ], +) def test_analyze_iam_policy_rest_call_success(request_type): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'analysis_query': {'scope': 'sample1/sample2'}} + request_init = {"analysis_query": {"scope": "sample1/sample2"}} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
return_value = asset_service.AnalyzeIamPolicyResponse( - fully_explored=True, + fully_explored=True, ) # Wrap the value into a proper Response obj @@ -15477,7 +17173,7 @@ def test_analyze_iam_policy_rest_call_success(request_type): # Convert return value to protobuf type return_value = asset_service.AnalyzeIamPolicyResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.analyze_iam_policy(request) @@ -15491,19 +17187,29 @@ def test_analyze_iam_policy_rest_call_success(request_type): def test_analyze_iam_policy_rest_interceptors(null_interceptor): transport = transports.AssetServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.AssetServiceRestInterceptor(), + ) client = AssetServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_analyze_iam_policy") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_analyze_iam_policy_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_analyze_iam_policy") as pre: + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.AssetServiceRestInterceptor, "post_analyze_iam_policy" + ) as post, mock.patch.object( + transports.AssetServiceRestInterceptor, "post_analyze_iam_policy_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.AssetServiceRestInterceptor, "pre_analyze_iam_policy" + ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = asset_service.AnalyzeIamPolicyRequest.pb(asset_service.AnalyzeIamPolicyRequest()) + pb_message = asset_service.AnalyzeIamPolicyRequest.pb( + asset_service.AnalyzeIamPolicyRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -15514,39 +17220,53 @@ def test_analyze_iam_policy_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = asset_service.AnalyzeIamPolicyResponse.to_json(asset_service.AnalyzeIamPolicyResponse()) + return_value = asset_service.AnalyzeIamPolicyResponse.to_json( + asset_service.AnalyzeIamPolicyResponse() + ) req.return_value.content = return_value request = asset_service.AnalyzeIamPolicyRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata post.return_value = asset_service.AnalyzeIamPolicyResponse() - post_with_metadata.return_value = asset_service.AnalyzeIamPolicyResponse(), metadata + post_with_metadata.return_value = ( + asset_service.AnalyzeIamPolicyResponse(), + metadata, + ) - client.analyze_iam_policy(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.analyze_iam_policy( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) 
pre.assert_called_once() post.assert_called_once() post_with_metadata.assert_called_once() -def test_analyze_iam_policy_longrunning_rest_bad_request(request_type=asset_service.AnalyzeIamPolicyLongrunningRequest): +def test_analyze_iam_policy_longrunning_rest_bad_request( + request_type=asset_service.AnalyzeIamPolicyLongrunningRequest, +): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'analysis_query': {'scope': 'sample1/sample2'}} + request_init = {"analysis_query": {"scope": "sample1/sample2"}} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -15555,30 +17275,32 @@ def test_analyze_iam_policy_longrunning_rest_bad_request(request_type=asset_serv client.analyze_iam_policy_longrunning(request) -@pytest.mark.parametrize("request_type", [ - asset_service.AnalyzeIamPolicyLongrunningRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + asset_service.AnalyzeIamPolicyLongrunningRequest, + dict, + ], +) def test_analyze_iam_policy_longrunning_rest_call_success(request_type): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'analysis_query': {'scope': 'sample1/sample2'}} + request_init = {"analysis_query": {"scope": "sample1/sample2"}} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.analyze_iam_policy_longrunning(request) @@ -15591,20 +17313,32 @@ def test_analyze_iam_policy_longrunning_rest_call_success(request_type): def test_analyze_iam_policy_longrunning_rest_interceptors(null_interceptor): transport = transports.AssetServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.AssetServiceRestInterceptor(), + ) client = AssetServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_analyze_iam_policy_longrunning") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_analyze_iam_policy_longrunning_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_analyze_iam_policy_longrunning") as pre: + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.AssetServiceRestInterceptor, "post_analyze_iam_policy_longrunning" + ) as post, mock.patch.object( + transports.AssetServiceRestInterceptor, + "post_analyze_iam_policy_longrunning_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.AssetServiceRestInterceptor, "pre_analyze_iam_policy_longrunning" + ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = asset_service.AnalyzeIamPolicyLongrunningRequest.pb(asset_service.AnalyzeIamPolicyLongrunningRequest()) + pb_message = asset_service.AnalyzeIamPolicyLongrunningRequest.pb( + asset_service.AnalyzeIamPolicyLongrunningRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -15619,7 +17353,7 @@ def test_analyze_iam_policy_longrunning_rest_interceptors(null_interceptor): req.return_value.content = return_value request = asset_service.AnalyzeIamPolicyLongrunningRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] @@ -15627,7 +17361,13 @@ def test_analyze_iam_policy_longrunning_rest_interceptors(null_interceptor): post.return_value = operations_pb2.Operation() post_with_metadata.return_value = operations_pb2.Operation(), metadata - client.analyze_iam_policy_longrunning(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.analyze_iam_policy_longrunning( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() @@ -15636,18 +17376,19 @@ def test_analyze_iam_policy_longrunning_rest_interceptors(null_interceptor): def 
test_analyze_move_rest_bad_request(request_type=asset_service.AnalyzeMoveRequest): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'resource': 'sample1/sample2'} + request_init = {"resource": "sample1/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -15656,25 +17397,26 @@ def test_analyze_move_rest_bad_request(request_type=asset_service.AnalyzeMoveReq client.analyze_move(request) -@pytest.mark.parametrize("request_type", [ - asset_service.AnalyzeMoveRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + asset_service.AnalyzeMoveRequest, + dict, + ], +) def test_analyze_move_rest_call_success(request_type): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'resource': 'sample1/sample2'} + request_init = {"resource": "sample1/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = asset_service.AnalyzeMoveResponse( - ) + return_value = asset_service.AnalyzeMoveResponse() # Wrap the value into a proper Response obj response_value = mock.Mock() @@ -15683,7 +17425,7 @@ def test_analyze_move_rest_call_success(request_type): # Convert return value to protobuf type return_value = asset_service.AnalyzeMoveResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.analyze_move(request) @@ -15696,19 +17438,29 @@ def test_analyze_move_rest_call_success(request_type): def test_analyze_move_rest_interceptors(null_interceptor): transport = transports.AssetServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.AssetServiceRestInterceptor(), + ) client = AssetServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_analyze_move") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_analyze_move_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_analyze_move") as pre: + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.AssetServiceRestInterceptor, "post_analyze_move" + ) as post, mock.patch.object( + transports.AssetServiceRestInterceptor, "post_analyze_move_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.AssetServiceRestInterceptor, "pre_analyze_move" + ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = asset_service.AnalyzeMoveRequest.pb(asset_service.AnalyzeMoveRequest()) + pb_message = asset_service.AnalyzeMoveRequest.pb( + asset_service.AnalyzeMoveRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -15719,11 +17471,13 @@ def test_analyze_move_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = asset_service.AnalyzeMoveResponse.to_json(asset_service.AnalyzeMoveResponse()) + return_value = asset_service.AnalyzeMoveResponse.to_json( + asset_service.AnalyzeMoveResponse() + ) req.return_value.content = return_value request = asset_service.AnalyzeMoveRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] @@ -15731,7 +17485,13 @@ def test_analyze_move_rest_interceptors(null_interceptor): post.return_value = asset_service.AnalyzeMoveResponse() post_with_metadata.return_value = asset_service.AnalyzeMoveResponse(), metadata - client.analyze_move(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.analyze_move( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() @@ -15740,18 +17500,19 @@ def test_analyze_move_rest_interceptors(null_interceptor): def 
test_query_assets_rest_bad_request(request_type=asset_service.QueryAssetsRequest): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2'} + request_init = {"parent": "sample1/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -15760,26 +17521,28 @@ def test_query_assets_rest_bad_request(request_type=asset_service.QueryAssetsReq client.query_assets(request) -@pytest.mark.parametrize("request_type", [ - asset_service.QueryAssetsRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + asset_service.QueryAssetsRequest, + dict, + ], +) def test_query_assets_rest_call_success(request_type): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2'} + request_init = {"parent": "sample1/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = asset_service.QueryAssetsResponse( - job_reference='job_reference_value', - done=True, + job_reference="job_reference_value", + done=True, ) # Wrap the value into a proper Response obj @@ -15789,14 +17552,14 @@ def test_query_assets_rest_call_success(request_type): # Convert return value to protobuf type return_value = asset_service.QueryAssetsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.query_assets(request) # Establish that the response is the type that we expect. 
assert isinstance(response, asset_service.QueryAssetsResponse) - assert response.job_reference == 'job_reference_value' + assert response.job_reference == "job_reference_value" assert response.done is True @@ -15804,19 +17567,29 @@ def test_query_assets_rest_call_success(request_type): def test_query_assets_rest_interceptors(null_interceptor): transport = transports.AssetServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.AssetServiceRestInterceptor(), + ) client = AssetServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_query_assets") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_query_assets_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_query_assets") as pre: + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.AssetServiceRestInterceptor, "post_query_assets" + ) as post, mock.patch.object( + transports.AssetServiceRestInterceptor, "post_query_assets_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.AssetServiceRestInterceptor, "pre_query_assets" + ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = asset_service.QueryAssetsRequest.pb(asset_service.QueryAssetsRequest()) + pb_message = asset_service.QueryAssetsRequest.pb( + asset_service.QueryAssetsRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -15827,11 +17600,13 @@ def test_query_assets_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = asset_service.QueryAssetsResponse.to_json(asset_service.QueryAssetsResponse()) + return_value = asset_service.QueryAssetsResponse.to_json( + asset_service.QueryAssetsResponse() + ) req.return_value.content = return_value request = asset_service.QueryAssetsRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] @@ -15839,27 +17614,36 @@ def test_query_assets_rest_interceptors(null_interceptor): post.return_value = asset_service.QueryAssetsResponse() post_with_metadata.return_value = asset_service.QueryAssetsResponse(), metadata - client.query_assets(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.query_assets( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() post_with_metadata.assert_called_once() -def test_create_saved_query_rest_bad_request(request_type=asset_service.CreateSavedQueryRequest): +def test_create_saved_query_rest_bad_request( + request_type=asset_service.CreateSavedQueryRequest, +): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2'} + request_init = {"parent": "sample1/sample2"} request = request_type(**request_init) # Mock the 
http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -15868,19 +17652,49 @@ def test_create_saved_query_rest_bad_request(request_type=asset_service.CreateSa client.create_saved_query(request) -@pytest.mark.parametrize("request_type", [ - asset_service.CreateSavedQueryRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + asset_service.CreateSavedQueryRequest, + dict, + ], +) def test_create_saved_query_rest_call_success(request_type): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2'} - request_init["saved_query"] = {'name': 'name_value', 'description': 'description_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'creator': 'creator_value', 'last_update_time': {}, 'last_updater': 'last_updater_value', 'labels': {}, 'content': {'iam_policy_analysis_query': {'scope': 'scope_value', 'resource_selector': {'full_resource_name': 'full_resource_name_value'}, 'identity_selector': {'identity': 'identity_value'}, 'access_selector': {'roles': ['roles_value1', 'roles_value2'], 'permissions': ['permissions_value1', 'permissions_value2']}, 'options': {'expand_groups': True, 'expand_roles': True, 'expand_resources': True, 'output_resource_edges': True, 'output_group_edges': True, 'analyze_service_account_impersonation': True}, 'condition_context': {'access_time': {}}}}} + request_init = {"parent": "sample1/sample2"} + request_init["saved_query"] = { + "name": "name_value", + "description": "description_value", + "create_time": {"seconds": 751, "nanos": 543}, + "creator": "creator_value", + "last_update_time": {}, + "last_updater": "last_updater_value", + "labels": {}, + "content": { + "iam_policy_analysis_query": { + "scope": "scope_value", + "resource_selector": {"full_resource_name": "full_resource_name_value"}, + "identity_selector": {"identity": "identity_value"}, + "access_selector": { + "roles": ["roles_value1", "roles_value2"], + "permissions": ["permissions_value1", "permissions_value2"], + }, + "options": { + "expand_groups": True, + "expand_roles": True, + "expand_resources": True, + "output_resource_edges": True, + "output_group_edges": True, + "analyze_service_account_impersonation": True, + }, + "condition_context": {"access_time": {}}, + } + }, + } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 @@ -15900,7 +17714,7 @@ def get_message_fields(field): if is_field_type_proto_plus_type: message_fields = field.message.meta.fields.values() # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER + else: # pragma: NO COVER message_fields = field.message.DESCRIPTOR.fields return message_fields @@ -15914,7 +17728,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["saved_query"].items(): # pragma: NO COVER + for field, value in request_init["saved_query"].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -15929,12 +17743,16 @@ def get_message_fields(field): for subfield in result.keys(): if (field, subfield) not in runtime_nested_fields: subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } ) # Remove fields from the sample request which are not present in the runtime version of the dependency # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER field = subfield_to_delete.get("field") field_repeated = subfield_to_delete.get("is_repeated") subfield = subfield_to_delete.get("subfield") @@ -15947,13 +17765,13 @@ def get_message_fields(field): request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = asset_service.SavedQuery( - name='name_value', - description='description_value', - creator='creator_value', - last_updater='last_updater_value', + name="name_value", + description="description_value", + creator="creator_value", + last_updater="last_updater_value", ) # Wrap the value into a proper Response obj @@ -15963,36 +17781,46 @@ def get_message_fields(field): # Convert return value to protobuf type return_value = asset_service.SavedQuery.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.create_saved_query(request) # Establish that the response is the type that we expect. 
assert isinstance(response, asset_service.SavedQuery) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.creator == 'creator_value' - assert response.last_updater == 'last_updater_value' + assert response.name == "name_value" + assert response.description == "description_value" + assert response.creator == "creator_value" + assert response.last_updater == "last_updater_value" @pytest.mark.parametrize("null_interceptor", [True, False]) def test_create_saved_query_rest_interceptors(null_interceptor): transport = transports.AssetServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.AssetServiceRestInterceptor(), + ) client = AssetServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_create_saved_query") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_create_saved_query_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_create_saved_query") as pre: + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.AssetServiceRestInterceptor, "post_create_saved_query" + ) as post, mock.patch.object( + transports.AssetServiceRestInterceptor, "post_create_saved_query_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.AssetServiceRestInterceptor, "pre_create_saved_query" + ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = asset_service.CreateSavedQueryRequest.pb(asset_service.CreateSavedQueryRequest()) + pb_message = asset_service.CreateSavedQueryRequest.pb( + asset_service.CreateSavedQueryRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -16007,7 +17835,7 @@ def test_create_saved_query_rest_interceptors(null_interceptor): req.return_value.content = return_value request = asset_service.CreateSavedQueryRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] @@ -16015,27 +17843,36 @@ def test_create_saved_query_rest_interceptors(null_interceptor): post.return_value = asset_service.SavedQuery() post_with_metadata.return_value = asset_service.SavedQuery(), metadata - client.create_saved_query(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.create_saved_query( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() post_with_metadata.assert_called_once() -def test_get_saved_query_rest_bad_request(request_type=asset_service.GetSavedQueryRequest): +def test_get_saved_query_rest_bad_request( + request_type=asset_service.GetSavedQueryRequest, +): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'name': 'sample1/sample2/savedQueries/sample3'} + request_init = {"name": "sample1/sample2/savedQueries/sample3"} request = request_type(**request_init) # Mock the http request call within the 
method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -16044,28 +17881,30 @@ def test_get_saved_query_rest_bad_request(request_type=asset_service.GetSavedQue client.get_saved_query(request) -@pytest.mark.parametrize("request_type", [ - asset_service.GetSavedQueryRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + asset_service.GetSavedQueryRequest, + dict, + ], +) def test_get_saved_query_rest_call_success(request_type): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'name': 'sample1/sample2/savedQueries/sample3'} + request_init = {"name": "sample1/sample2/savedQueries/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = asset_service.SavedQuery( - name='name_value', - description='description_value', - creator='creator_value', - last_updater='last_updater_value', + name="name_value", + description="description_value", + creator="creator_value", + last_updater="last_updater_value", ) # Wrap the value into a proper Response obj @@ -16075,36 +17914,46 @@ def test_get_saved_query_rest_call_success(request_type): # Convert return value to protobuf type return_value = asset_service.SavedQuery.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.get_saved_query(request) # Establish that the response is the type that we expect. 
assert isinstance(response, asset_service.SavedQuery) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.creator == 'creator_value' - assert response.last_updater == 'last_updater_value' + assert response.name == "name_value" + assert response.description == "description_value" + assert response.creator == "creator_value" + assert response.last_updater == "last_updater_value" @pytest.mark.parametrize("null_interceptor", [True, False]) def test_get_saved_query_rest_interceptors(null_interceptor): transport = transports.AssetServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.AssetServiceRestInterceptor(), + ) client = AssetServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_get_saved_query") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_get_saved_query_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_get_saved_query") as pre: + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.AssetServiceRestInterceptor, "post_get_saved_query" + ) as post, mock.patch.object( + transports.AssetServiceRestInterceptor, "post_get_saved_query_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.AssetServiceRestInterceptor, "pre_get_saved_query" + ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = asset_service.GetSavedQueryRequest.pb(asset_service.GetSavedQueryRequest()) + pb_message = asset_service.GetSavedQueryRequest.pb( + asset_service.GetSavedQueryRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -16119,7 +17968,7 @@ def test_get_saved_query_rest_interceptors(null_interceptor): req.return_value.content = return_value request = asset_service.GetSavedQueryRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] @@ -16127,27 +17976,36 @@ def test_get_saved_query_rest_interceptors(null_interceptor): post.return_value = asset_service.SavedQuery() post_with_metadata.return_value = asset_service.SavedQuery(), metadata - client.get_saved_query(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.get_saved_query( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() post_with_metadata.assert_called_once() -def test_list_saved_queries_rest_bad_request(request_type=asset_service.ListSavedQueriesRequest): +def test_list_saved_queries_rest_bad_request( + request_type=asset_service.ListSavedQueriesRequest, +): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2'} + request_init = {"parent": "sample1/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -16156,25 +18014,27 @@ def test_list_saved_queries_rest_bad_request(request_type=asset_service.ListSave client.list_saved_queries(request) -@pytest.mark.parametrize("request_type", [ - asset_service.ListSavedQueriesRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + asset_service.ListSavedQueriesRequest, + dict, + ], +) def test_list_saved_queries_rest_call_success(request_type): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2'} + request_init = {"parent": "sample1/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = asset_service.ListSavedQueriesResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", ) # Wrap the value into a proper Response obj @@ -16184,33 +18044,43 @@ def test_list_saved_queries_rest_call_success(request_type): # Convert return value to protobuf type return_value = asset_service.ListSavedQueriesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.list_saved_queries(request) # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListSavedQueriesPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" @pytest.mark.parametrize("null_interceptor", [True, False]) def test_list_saved_queries_rest_interceptors(null_interceptor): transport = transports.AssetServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.AssetServiceRestInterceptor(), + ) client = AssetServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_list_saved_queries") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_list_saved_queries_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_list_saved_queries") as pre: + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.AssetServiceRestInterceptor, "post_list_saved_queries" + ) as post, mock.patch.object( + transports.AssetServiceRestInterceptor, "post_list_saved_queries_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.AssetServiceRestInterceptor, "pre_list_saved_queries" + ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = asset_service.ListSavedQueriesRequest.pb(asset_service.ListSavedQueriesRequest()) + pb_message = asset_service.ListSavedQueriesRequest.pb( + asset_service.ListSavedQueriesRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -16221,39 +18091,53 @@ def test_list_saved_queries_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = asset_service.ListSavedQueriesResponse.to_json(asset_service.ListSavedQueriesResponse()) + return_value = asset_service.ListSavedQueriesResponse.to_json( + asset_service.ListSavedQueriesResponse() + ) req.return_value.content = return_value request = asset_service.ListSavedQueriesRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata post.return_value = asset_service.ListSavedQueriesResponse() - post_with_metadata.return_value = asset_service.ListSavedQueriesResponse(), metadata + post_with_metadata.return_value = ( + asset_service.ListSavedQueriesResponse(), + metadata, + ) - client.list_saved_queries(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.list_saved_queries( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() post_with_metadata.assert_called_once() -def test_update_saved_query_rest_bad_request(request_type=asset_service.UpdateSavedQueryRequest): +def test_update_saved_query_rest_bad_request( + request_type=asset_service.UpdateSavedQueryRequest, +): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = 
{'saved_query': {'name': 'sample1/sample2/savedQueries/sample3'}} + request_init = {"saved_query": {"name": "sample1/sample2/savedQueries/sample3"}} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -16262,19 +18146,49 @@ def test_update_saved_query_rest_bad_request(request_type=asset_service.UpdateSa client.update_saved_query(request) -@pytest.mark.parametrize("request_type", [ - asset_service.UpdateSavedQueryRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + asset_service.UpdateSavedQueryRequest, + dict, + ], +) def test_update_saved_query_rest_call_success(request_type): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'saved_query': {'name': 'sample1/sample2/savedQueries/sample3'}} - request_init["saved_query"] = {'name': 'sample1/sample2/savedQueries/sample3', 'description': 'description_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'creator': 'creator_value', 'last_update_time': {}, 'last_updater': 'last_updater_value', 'labels': {}, 'content': {'iam_policy_analysis_query': {'scope': 'scope_value', 'resource_selector': {'full_resource_name': 'full_resource_name_value'}, 'identity_selector': {'identity': 'identity_value'}, 'access_selector': {'roles': ['roles_value1', 'roles_value2'], 'permissions': ['permissions_value1', 'permissions_value2']}, 'options': {'expand_groups': True, 'expand_roles': True, 'expand_resources': True, 'output_resource_edges': True, 'output_group_edges': True, 'analyze_service_account_impersonation': True}, 'condition_context': {'access_time': {}}}}} + request_init = {"saved_query": {"name": "sample1/sample2/savedQueries/sample3"}} + request_init["saved_query"] = { + "name": "sample1/sample2/savedQueries/sample3", + "description": "description_value", + "create_time": {"seconds": 751, "nanos": 543}, + "creator": "creator_value", + "last_update_time": {}, + "last_updater": "last_updater_value", + "labels": {}, + "content": { + "iam_policy_analysis_query": { + "scope": "scope_value", + "resource_selector": {"full_resource_name": "full_resource_name_value"}, + "identity_selector": {"identity": "identity_value"}, + "access_selector": { + "roles": ["roles_value1", "roles_value2"], + "permissions": ["permissions_value1", "permissions_value2"], + }, + "options": { + "expand_groups": True, + "expand_roles": True, + "expand_resources": True, + "output_resource_edges": True, + "output_group_edges": True, + "analyze_service_account_impersonation": True, + }, + "condition_context": {"access_time": {}}, + } + }, + } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 @@ -16294,7 +18208,7 @@ def get_message_fields(field): if is_field_type_proto_plus_type: message_fields = field.message.meta.fields.values() # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER + else: # pragma: NO COVER message_fields = field.message.DESCRIPTOR.fields return message_fields @@ -16308,7 +18222,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["saved_query"].items(): # pragma: NO COVER + for field, value in request_init["saved_query"].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -16323,12 +18237,16 @@ def get_message_fields(field): for subfield in result.keys(): if (field, subfield) not in runtime_nested_fields: subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } ) # Remove fields from the sample request which are not present in the runtime version of the dependency # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER field = subfield_to_delete.get("field") field_repeated = subfield_to_delete.get("is_repeated") subfield = subfield_to_delete.get("subfield") @@ -16341,13 +18259,13 @@ def get_message_fields(field): request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = asset_service.SavedQuery( - name='name_value', - description='description_value', - creator='creator_value', - last_updater='last_updater_value', + name="name_value", + description="description_value", + creator="creator_value", + last_updater="last_updater_value", ) # Wrap the value into a proper Response obj @@ -16357,36 +18275,46 @@ def get_message_fields(field): # Convert return value to protobuf type return_value = asset_service.SavedQuery.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.update_saved_query(request) # Establish that the response is the type that we expect. 
assert isinstance(response, asset_service.SavedQuery) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.creator == 'creator_value' - assert response.last_updater == 'last_updater_value' + assert response.name == "name_value" + assert response.description == "description_value" + assert response.creator == "creator_value" + assert response.last_updater == "last_updater_value" @pytest.mark.parametrize("null_interceptor", [True, False]) def test_update_saved_query_rest_interceptors(null_interceptor): transport = transports.AssetServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.AssetServiceRestInterceptor(), + ) client = AssetServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_update_saved_query") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_update_saved_query_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_update_saved_query") as pre: + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.AssetServiceRestInterceptor, "post_update_saved_query" + ) as post, mock.patch.object( + transports.AssetServiceRestInterceptor, "post_update_saved_query_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.AssetServiceRestInterceptor, "pre_update_saved_query" + ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = asset_service.UpdateSavedQueryRequest.pb(asset_service.UpdateSavedQueryRequest()) + pb_message = asset_service.UpdateSavedQueryRequest.pb( + asset_service.UpdateSavedQueryRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -16401,7 +18329,7 @@ def test_update_saved_query_rest_interceptors(null_interceptor): req.return_value.content = return_value request = asset_service.UpdateSavedQueryRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] @@ -16409,27 +18337,36 @@ def test_update_saved_query_rest_interceptors(null_interceptor): post.return_value = asset_service.SavedQuery() post_with_metadata.return_value = asset_service.SavedQuery(), metadata - client.update_saved_query(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.update_saved_query( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() post_with_metadata.assert_called_once() -def test_delete_saved_query_rest_bad_request(request_type=asset_service.DeleteSavedQueryRequest): +def test_delete_saved_query_rest_bad_request( + request_type=asset_service.DeleteSavedQueryRequest, +): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'name': 'sample1/sample2/savedQueries/sample3'} + request_init = {"name": "sample1/sample2/savedQueries/sample3"} request = request_type(**request_init) # Mock the http request call 
within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -16438,30 +18375,32 @@ def test_delete_saved_query_rest_bad_request(request_type=asset_service.DeleteSa client.delete_saved_query(request) -@pytest.mark.parametrize("request_type", [ - asset_service.DeleteSavedQueryRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + asset_service.DeleteSavedQueryRequest, + dict, + ], +) def test_delete_saved_query_rest_call_success(request_type): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'name': 'sample1/sample2/savedQueries/sample3'} + request_init = {"name": "sample1/sample2/savedQueries/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = None # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 - json_return_value = '' - response_value.content = json_return_value.encode('UTF-8') + json_return_value = "" + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.delete_saved_query(request) @@ -16474,15 +18413,23 @@ def test_delete_saved_query_rest_call_success(request_type): def test_delete_saved_query_rest_interceptors(null_interceptor): transport = transports.AssetServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.AssetServiceRestInterceptor(), + ) client = AssetServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_delete_saved_query") as pre: + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.AssetServiceRestInterceptor, "pre_delete_saved_query" + ) as pre: pre.assert_not_called() - pb_message = asset_service.DeleteSavedQueryRequest.pb(asset_service.DeleteSavedQueryRequest()) + pb_message = asset_service.DeleteSavedQueryRequest.pb( + asset_service.DeleteSavedQueryRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -16495,31 +18442,40 @@ def test_delete_saved_query_rest_interceptors(null_interceptor): req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} request = asset_service.DeleteSavedQueryRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", 
"squid"), ] pre.return_value = request, metadata - client.delete_saved_query(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.delete_saved_query( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() -def test_batch_get_effective_iam_policies_rest_bad_request(request_type=asset_service.BatchGetEffectiveIamPoliciesRequest): +def test_batch_get_effective_iam_policies_rest_bad_request( + request_type=asset_service.BatchGetEffectiveIamPoliciesRequest, +): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'scope': 'sample1/sample2'} + request_init = {"scope": "sample1/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -16528,34 +18484,37 @@ def test_batch_get_effective_iam_policies_rest_bad_request(request_type=asset_se client.batch_get_effective_iam_policies(request) -@pytest.mark.parametrize("request_type", [ - asset_service.BatchGetEffectiveIamPoliciesRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + asset_service.BatchGetEffectiveIamPoliciesRequest, + dict, + ], +) def test_batch_get_effective_iam_policies_rest_call_success(request_type): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'scope': 'sample1/sample2'} + request_init = {"scope": "sample1/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = asset_service.BatchGetEffectiveIamPoliciesResponse( - ) + return_value = asset_service.BatchGetEffectiveIamPoliciesResponse() # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 # Convert return value to protobuf type - return_value = asset_service.BatchGetEffectiveIamPoliciesResponse.pb(return_value) + return_value = asset_service.BatchGetEffectiveIamPoliciesResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.batch_get_effective_iam_policies(request) @@ -16568,19 +18527,30 @@ def test_batch_get_effective_iam_policies_rest_call_success(request_type): def test_batch_get_effective_iam_policies_rest_interceptors(null_interceptor): transport = transports.AssetServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.AssetServiceRestInterceptor(), + ) client = AssetServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_batch_get_effective_iam_policies") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_batch_get_effective_iam_policies_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_batch_get_effective_iam_policies") as pre: + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.AssetServiceRestInterceptor, "post_batch_get_effective_iam_policies" + ) as post, mock.patch.object( + transports.AssetServiceRestInterceptor, + "post_batch_get_effective_iam_policies_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.AssetServiceRestInterceptor, "pre_batch_get_effective_iam_policies" + ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = asset_service.BatchGetEffectiveIamPoliciesRequest.pb(asset_service.BatchGetEffectiveIamPoliciesRequest()) + pb_message = asset_service.BatchGetEffectiveIamPoliciesRequest.pb( + asset_service.BatchGetEffectiveIamPoliciesRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -16591,39 +18561,53 @@ def test_batch_get_effective_iam_policies_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = asset_service.BatchGetEffectiveIamPoliciesResponse.to_json(asset_service.BatchGetEffectiveIamPoliciesResponse()) + return_value = asset_service.BatchGetEffectiveIamPoliciesResponse.to_json( + asset_service.BatchGetEffectiveIamPoliciesResponse() + ) req.return_value.content = return_value request = asset_service.BatchGetEffectiveIamPoliciesRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata post.return_value = asset_service.BatchGetEffectiveIamPoliciesResponse() - 
post_with_metadata.return_value = asset_service.BatchGetEffectiveIamPoliciesResponse(), metadata + post_with_metadata.return_value = ( + asset_service.BatchGetEffectiveIamPoliciesResponse(), + metadata, + ) - client.batch_get_effective_iam_policies(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.batch_get_effective_iam_policies( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() post_with_metadata.assert_called_once() -def test_analyze_org_policies_rest_bad_request(request_type=asset_service.AnalyzeOrgPoliciesRequest): +def test_analyze_org_policies_rest_bad_request( + request_type=asset_service.AnalyzeOrgPoliciesRequest, +): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'scope': 'sample1/sample2'} + request_init = {"scope": "sample1/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -16632,25 +18616,27 @@ def test_analyze_org_policies_rest_bad_request(request_type=asset_service.Analyz client.analyze_org_policies(request) -@pytest.mark.parametrize("request_type", [ - asset_service.AnalyzeOrgPoliciesRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + asset_service.AnalyzeOrgPoliciesRequest, + dict, + ], +) def test_analyze_org_policies_rest_call_success(request_type): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'scope': 'sample1/sample2'} + request_init = {"scope": "sample1/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = asset_service.AnalyzeOrgPoliciesResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", ) # Wrap the value into a proper Response obj @@ -16660,33 +18646,44 @@ def test_analyze_org_policies_rest_call_success(request_type): # Convert return value to protobuf type return_value = asset_service.AnalyzeOrgPoliciesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.analyze_org_policies(request) # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.AnalyzeOrgPoliciesPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" @pytest.mark.parametrize("null_interceptor", [True, False]) def test_analyze_org_policies_rest_interceptors(null_interceptor): transport = transports.AssetServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.AssetServiceRestInterceptor(), + ) client = AssetServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_analyze_org_policies") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_analyze_org_policies_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_analyze_org_policies") as pre: + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.AssetServiceRestInterceptor, "post_analyze_org_policies" + ) as post, mock.patch.object( + transports.AssetServiceRestInterceptor, + "post_analyze_org_policies_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.AssetServiceRestInterceptor, "pre_analyze_org_policies" + ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = asset_service.AnalyzeOrgPoliciesRequest.pb(asset_service.AnalyzeOrgPoliciesRequest()) + pb_message = asset_service.AnalyzeOrgPoliciesRequest.pb( + asset_service.AnalyzeOrgPoliciesRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -16697,39 +18694,53 @@ def test_analyze_org_policies_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = asset_service.AnalyzeOrgPoliciesResponse.to_json(asset_service.AnalyzeOrgPoliciesResponse()) + return_value = asset_service.AnalyzeOrgPoliciesResponse.to_json( + asset_service.AnalyzeOrgPoliciesResponse() + ) req.return_value.content = return_value request = asset_service.AnalyzeOrgPoliciesRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata post.return_value = asset_service.AnalyzeOrgPoliciesResponse() - post_with_metadata.return_value = asset_service.AnalyzeOrgPoliciesResponse(), metadata + post_with_metadata.return_value = ( + asset_service.AnalyzeOrgPoliciesResponse(), + metadata, + ) - client.analyze_org_policies(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.analyze_org_policies( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() post_with_metadata.assert_called_once() -def test_analyze_org_policy_governed_containers_rest_bad_request(request_type=asset_service.AnalyzeOrgPolicyGovernedContainersRequest): +def test_analyze_org_policy_governed_containers_rest_bad_request( + request_type=asset_service.AnalyzeOrgPolicyGovernedContainersRequest, +): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + 
credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'scope': 'sample1/sample2'} + request_init = {"scope": "sample1/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -16738,25 +18749,27 @@ def test_analyze_org_policy_governed_containers_rest_bad_request(request_type=as client.analyze_org_policy_governed_containers(request) -@pytest.mark.parametrize("request_type", [ - asset_service.AnalyzeOrgPolicyGovernedContainersRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + asset_service.AnalyzeOrgPolicyGovernedContainersRequest, + dict, + ], +) def test_analyze_org_policy_governed_containers_rest_call_success(request_type): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'scope': 'sample1/sample2'} + request_init = {"scope": "sample1/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = asset_service.AnalyzeOrgPolicyGovernedContainersResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", ) # Wrap the value into a proper Response obj @@ -16764,35 +18777,50 @@ def test_analyze_org_policy_governed_containers_rest_call_success(request_type): response_value.status_code = 200 # Convert return value to protobuf type - return_value = asset_service.AnalyzeOrgPolicyGovernedContainersResponse.pb(return_value) + return_value = asset_service.AnalyzeOrgPolicyGovernedContainersResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.analyze_org_policy_governed_containers(request) # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.AnalyzeOrgPolicyGovernedContainersPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" @pytest.mark.parametrize("null_interceptor", [True, False]) def test_analyze_org_policy_governed_containers_rest_interceptors(null_interceptor): transport = transports.AssetServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.AssetServiceRestInterceptor(), + ) client = AssetServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_analyze_org_policy_governed_containers") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_analyze_org_policy_governed_containers_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_analyze_org_policy_governed_containers") as pre: + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.AssetServiceRestInterceptor, + "post_analyze_org_policy_governed_containers", + ) as post, mock.patch.object( + transports.AssetServiceRestInterceptor, + "post_analyze_org_policy_governed_containers_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.AssetServiceRestInterceptor, + "pre_analyze_org_policy_governed_containers", + ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = asset_service.AnalyzeOrgPolicyGovernedContainersRequest.pb(asset_service.AnalyzeOrgPolicyGovernedContainersRequest()) + pb_message = asset_service.AnalyzeOrgPolicyGovernedContainersRequest.pb( + asset_service.AnalyzeOrgPolicyGovernedContainersRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -16803,39 +18831,53 @@ def test_analyze_org_policy_governed_containers_rest_interceptors(null_intercept req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = asset_service.AnalyzeOrgPolicyGovernedContainersResponse.to_json(asset_service.AnalyzeOrgPolicyGovernedContainersResponse()) + return_value = asset_service.AnalyzeOrgPolicyGovernedContainersResponse.to_json( + asset_service.AnalyzeOrgPolicyGovernedContainersResponse() + ) req.return_value.content = return_value request = asset_service.AnalyzeOrgPolicyGovernedContainersRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata post.return_value = asset_service.AnalyzeOrgPolicyGovernedContainersResponse() - post_with_metadata.return_value = asset_service.AnalyzeOrgPolicyGovernedContainersResponse(), metadata + post_with_metadata.return_value = ( + asset_service.AnalyzeOrgPolicyGovernedContainersResponse(), + metadata, + ) - client.analyze_org_policy_governed_containers(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.analyze_org_policy_governed_containers( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() post_with_metadata.assert_called_once() -def 
test_analyze_org_policy_governed_assets_rest_bad_request(request_type=asset_service.AnalyzeOrgPolicyGovernedAssetsRequest): +def test_analyze_org_policy_governed_assets_rest_bad_request( + request_type=asset_service.AnalyzeOrgPolicyGovernedAssetsRequest, +): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'scope': 'sample1/sample2'} + request_init = {"scope": "sample1/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -16844,25 +18886,27 @@ def test_analyze_org_policy_governed_assets_rest_bad_request(request_type=asset_ client.analyze_org_policy_governed_assets(request) -@pytest.mark.parametrize("request_type", [ - asset_service.AnalyzeOrgPolicyGovernedAssetsRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + asset_service.AnalyzeOrgPolicyGovernedAssetsRequest, + dict, + ], +) def test_analyze_org_policy_governed_assets_rest_call_success(request_type): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'scope': 'sample1/sample2'} + request_init = {"scope": "sample1/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", ) # Wrap the value into a proper Response obj @@ -16870,35 +18914,49 @@ def test_analyze_org_policy_governed_assets_rest_call_success(request_type): response_value.status_code = 200 # Convert return value to protobuf type - return_value = asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.pb(return_value) + return_value = asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.analyze_org_policy_governed_assets(request) # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.AnalyzeOrgPolicyGovernedAssetsPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" @pytest.mark.parametrize("null_interceptor", [True, False]) def test_analyze_org_policy_governed_assets_rest_interceptors(null_interceptor): transport = transports.AssetServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.AssetServiceRestInterceptor(), + ) client = AssetServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_analyze_org_policy_governed_assets") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_analyze_org_policy_governed_assets_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_analyze_org_policy_governed_assets") as pre: + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.AssetServiceRestInterceptor, + "post_analyze_org_policy_governed_assets", + ) as post, mock.patch.object( + transports.AssetServiceRestInterceptor, + "post_analyze_org_policy_governed_assets_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.AssetServiceRestInterceptor, "pre_analyze_org_policy_governed_assets" + ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = asset_service.AnalyzeOrgPolicyGovernedAssetsRequest.pb(asset_service.AnalyzeOrgPolicyGovernedAssetsRequest()) + pb_message = asset_service.AnalyzeOrgPolicyGovernedAssetsRequest.pb( + asset_service.AnalyzeOrgPolicyGovernedAssetsRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -16909,38 +18967,55 @@ def test_analyze_org_policy_governed_assets_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.to_json(asset_service.AnalyzeOrgPolicyGovernedAssetsResponse()) + return_value = asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.to_json( + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse() + ) req.return_value.content = return_value request = asset_service.AnalyzeOrgPolicyGovernedAssetsRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata post.return_value = asset_service.AnalyzeOrgPolicyGovernedAssetsResponse() - post_with_metadata.return_value = asset_service.AnalyzeOrgPolicyGovernedAssetsResponse(), metadata + post_with_metadata.return_value = ( + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse(), + metadata, + ) - client.analyze_org_policy_governed_assets(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.analyze_org_policy_governed_assets( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() post_with_metadata.assert_called_once() -def test_get_operation_rest_bad_request(request_type=operations_pb2.GetOperationRequest): +def 
test_get_operation_rest_bad_request( + request_type=operations_pb2.GetOperationRequest, +): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) request = request_type() - request = json_format.ParseDict({'name': 'sample1/sample2/operations/sample3/sample4'}, request) + request = json_format.ParseDict( + {"name": "sample1/sample2/operations/sample3/sample4"}, request + ) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = Response() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = Request() @@ -16949,20 +19024,23 @@ def test_get_operation_rest_bad_request(request_type=operations_pb2.GetOperation client.get_operation(request) -@pytest.mark.parametrize("request_type", [ - operations_pb2.GetOperationRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) def test_get_operation_rest(request_type): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request_init = {'name': 'sample1/sample2/operations/sample3/sample4'} + request_init = {"name": "sample1/sample2/operations/sample3/sample4"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = operations_pb2.Operation() @@ -16970,7 +19048,7 @@ def test_get_operation_rest(request_type): response_value = mock.Mock() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -16980,10 +19058,10 @@ def test_get_operation_rest(request_type): # Establish that the response is the type that we expect. assert isinstance(response, operations_pb2.Operation) + def test_initialize_client_w_rest(): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) assert client is not None @@ -16997,9 +19075,7 @@ def test_export_assets_empty_call_rest(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.export_assets), - '__call__') as call: + with mock.patch.object(type(client.transport.export_assets), "__call__") as call: client.export_assets(request=None) # Establish that the underlying stub method was called. @@ -17019,9 +19095,7 @@ def test_list_assets_empty_call_rest(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_assets), - '__call__') as call: + with mock.patch.object(type(client.transport.list_assets), "__call__") as call: client.list_assets(request=None) # Establish that the underlying stub method was called. 
@@ -17042,8 +19116,8 @@ def test_batch_get_assets_history_empty_call_rest(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.batch_get_assets_history), - '__call__') as call: + type(client.transport.batch_get_assets_history), "__call__" + ) as call: client.batch_get_assets_history(request=None) # Establish that the underlying stub method was called. @@ -17063,9 +19137,7 @@ def test_create_feed_empty_call_rest(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_feed), - '__call__') as call: + with mock.patch.object(type(client.transport.create_feed), "__call__") as call: client.create_feed(request=None) # Establish that the underlying stub method was called. @@ -17085,9 +19157,7 @@ def test_get_feed_empty_call_rest(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_feed), - '__call__') as call: + with mock.patch.object(type(client.transport.get_feed), "__call__") as call: client.get_feed(request=None) # Establish that the underlying stub method was called. @@ -17107,9 +19177,7 @@ def test_list_feeds_empty_call_rest(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_feeds), - '__call__') as call: + with mock.patch.object(type(client.transport.list_feeds), "__call__") as call: client.list_feeds(request=None) # Establish that the underlying stub method was called. @@ -17129,9 +19197,7 @@ def test_update_feed_empty_call_rest(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_feed), - '__call__') as call: + with mock.patch.object(type(client.transport.update_feed), "__call__") as call: client.update_feed(request=None) # Establish that the underlying stub method was called. @@ -17151,9 +19217,7 @@ def test_delete_feed_empty_call_rest(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_feed), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_feed), "__call__") as call: client.delete_feed(request=None) # Establish that the underlying stub method was called. @@ -17174,8 +19238,8 @@ def test_search_all_resources_empty_call_rest(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.search_all_resources), - '__call__') as call: + type(client.transport.search_all_resources), "__call__" + ) as call: client.search_all_resources(request=None) # Establish that the underlying stub method was called. @@ -17196,8 +19260,8 @@ def test_search_all_iam_policies_empty_call_rest(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.search_all_iam_policies), - '__call__') as call: + type(client.transport.search_all_iam_policies), "__call__" + ) as call: client.search_all_iam_policies(request=None) # Establish that the underlying stub method was called. @@ -17218,8 +19282,8 @@ def test_analyze_iam_policy_empty_call_rest(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.analyze_iam_policy), - '__call__') as call: + type(client.transport.analyze_iam_policy), "__call__" + ) as call: client.analyze_iam_policy(request=None) # Establish that the underlying stub method was called. @@ -17240,8 +19304,8 @@ def test_analyze_iam_policy_longrunning_empty_call_rest(): # Mock the actual call, and fake the request. 
with mock.patch.object( - type(client.transport.analyze_iam_policy_longrunning), - '__call__') as call: + type(client.transport.analyze_iam_policy_longrunning), "__call__" + ) as call: client.analyze_iam_policy_longrunning(request=None) # Establish that the underlying stub method was called. @@ -17261,9 +19325,7 @@ def test_analyze_move_empty_call_rest(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.analyze_move), - '__call__') as call: + with mock.patch.object(type(client.transport.analyze_move), "__call__") as call: client.analyze_move(request=None) # Establish that the underlying stub method was called. @@ -17283,9 +19345,7 @@ def test_query_assets_empty_call_rest(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.query_assets), - '__call__') as call: + with mock.patch.object(type(client.transport.query_assets), "__call__") as call: client.query_assets(request=None) # Establish that the underlying stub method was called. @@ -17306,8 +19366,8 @@ def test_create_saved_query_empty_call_rest(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.create_saved_query), - '__call__') as call: + type(client.transport.create_saved_query), "__call__" + ) as call: client.create_saved_query(request=None) # Establish that the underlying stub method was called. @@ -17327,9 +19387,7 @@ def test_get_saved_query_empty_call_rest(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_saved_query), - '__call__') as call: + with mock.patch.object(type(client.transport.get_saved_query), "__call__") as call: client.get_saved_query(request=None) # Establish that the underlying stub method was called. @@ -17350,8 +19408,8 @@ def test_list_saved_queries_empty_call_rest(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.list_saved_queries), - '__call__') as call: + type(client.transport.list_saved_queries), "__call__" + ) as call: client.list_saved_queries(request=None) # Establish that the underlying stub method was called. @@ -17372,8 +19430,8 @@ def test_update_saved_query_empty_call_rest(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.update_saved_query), - '__call__') as call: + type(client.transport.update_saved_query), "__call__" + ) as call: client.update_saved_query(request=None) # Establish that the underlying stub method was called. @@ -17394,8 +19452,8 @@ def test_delete_saved_query_empty_call_rest(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.delete_saved_query), - '__call__') as call: + type(client.transport.delete_saved_query), "__call__" + ) as call: client.delete_saved_query(request=None) # Establish that the underlying stub method was called. @@ -17416,8 +19474,8 @@ def test_batch_get_effective_iam_policies_empty_call_rest(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.batch_get_effective_iam_policies), - '__call__') as call: + type(client.transport.batch_get_effective_iam_policies), "__call__" + ) as call: client.batch_get_effective_iam_policies(request=None) # Establish that the underlying stub method was called. @@ -17438,8 +19496,8 @@ def test_analyze_org_policies_empty_call_rest(): # Mock the actual call, and fake the request. 
     with mock.patch.object(
-        type(client.transport.analyze_org_policies),
-        '__call__') as call:
+        type(client.transport.analyze_org_policies), "__call__"
+    ) as call:
         client.analyze_org_policies(request=None)

         # Establish that the underlying stub method was called.
@@ -17460,8 +19518,8 @@ def test_analyze_org_policy_governed_containers_empty_call_rest():

     # Mock the actual call, and fake the request.
     with mock.patch.object(
-        type(client.transport.analyze_org_policy_governed_containers),
-        '__call__') as call:
+        type(client.transport.analyze_org_policy_governed_containers), "__call__"
+    ) as call:
         client.analyze_org_policy_governed_containers(request=None)

         # Establish that the underlying stub method was called.
@@ -17482,8 +19540,8 @@ def test_analyze_org_policy_governed_assets_empty_call_rest():

     # Mock the actual call, and fake the request.
     with mock.patch.object(
-        type(client.transport.analyze_org_policy_governed_assets),
-        '__call__') as call:
+        type(client.transport.analyze_org_policy_governed_assets), "__call__"
+    ) as call:
         client.analyze_org_policy_governed_assets(request=None)

         # Establish that the underlying stub method was called.
@@ -17504,12 +19562,13 @@ def test_asset_service_rest_lro_client():
     # Ensure that we have an api-core operations client.
     assert isinstance(
         transport.operations_client,
-operations_v1.AbstractOperationsClient,
+        operations_v1.AbstractOperationsClient,
     )

     # Ensure that subsequent calls to the property send the exact same object.
     assert transport.operations_client is transport.operations_client

+
 def test_transport_grpc_default():
     # A client should use the gRPC transport by default.
     client = AssetServiceClient(
@@ -17520,18 +19579,21 @@ def test_transport_grpc_default():
         transports.AssetServiceGrpcTransport,
     )

+
 def test_asset_service_base_transport_error():
     # Passing both a credentials object and credentials_file should raise an error
     with pytest.raises(core_exceptions.DuplicateCredentialArgs):
         transport = transports.AssetServiceTransport(
             credentials=ga_credentials.AnonymousCredentials(),
-            credentials_file="credentials.json"
+            credentials_file="credentials.json",
         )


 def test_asset_service_base_transport():
     # Instantiate the base transport.
-    with mock.patch('google.cloud.asset_v1.services.asset_service.transports.AssetServiceTransport.__init__') as Transport:
+    with mock.patch(
+        "google.cloud.asset_v1.services.asset_service.transports.AssetServiceTransport.__init__"
+    ) as Transport:
         Transport.return_value = None
         transport = transports.AssetServiceTransport(
             credentials=ga_credentials.AnonymousCredentials(),
@@ -17540,30 +19602,30 @@ def test_asset_service_base_transport():
     # Every method on the transport should just blindly
     # raise NotImplementedError.
     methods = (
-        'export_assets',
-        'list_assets',
-        'batch_get_assets_history',
-        'create_feed',
-        'get_feed',
-        'list_feeds',
-        'update_feed',
-        'delete_feed',
-        'search_all_resources',
-        'search_all_iam_policies',
-        'analyze_iam_policy',
-        'analyze_iam_policy_longrunning',
-        'analyze_move',
-        'query_assets',
-        'create_saved_query',
-        'get_saved_query',
-        'list_saved_queries',
-        'update_saved_query',
-        'delete_saved_query',
-        'batch_get_effective_iam_policies',
-        'analyze_org_policies',
-        'analyze_org_policy_governed_containers',
-        'analyze_org_policy_governed_assets',
-        'get_operation',
+        "export_assets",
+        "list_assets",
+        "batch_get_assets_history",
+        "create_feed",
+        "get_feed",
+        "list_feeds",
+        "update_feed",
+        "delete_feed",
+        "search_all_resources",
+        "search_all_iam_policies",
+        "analyze_iam_policy",
+        "analyze_iam_policy_longrunning",
+        "analyze_move",
+        "query_assets",
+        "create_saved_query",
+        "get_saved_query",
+        "list_saved_queries",
+        "update_saved_query",
+        "delete_saved_query",
+        "batch_get_effective_iam_policies",
+        "analyze_org_policies",
+        "analyze_org_policy_governed_containers",
+        "analyze_org_policy_governed_assets",
+        "get_operation",
     )
     for method in methods:
         with pytest.raises(NotImplementedError):
@@ -17579,7 +19641,7 @@ def test_asset_service_base_transport():

     # Catch all for all remaining methods and properties
     remainder = [
-        'kind',
+        "kind",
     ]
     for r in remainder:
         with pytest.raises(NotImplementedError):
@@ -17588,25 +19650,30 @@ def test_asset_service_base_transport_with_credentials_file():
     # Instantiate the base transport with a credentials file
-    with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.asset_v1.services.asset_service.transports.AssetServiceTransport._prep_wrapped_messages') as Transport:
+    with mock.patch.object(
+        google.auth, "load_credentials_from_file", autospec=True
+    ) as load_creds, mock.patch(
+        "google.cloud.asset_v1.services.asset_service.transports.AssetServiceTransport._prep_wrapped_messages"
+    ) as Transport:
         Transport.return_value = None
         load_creds.return_value = (ga_credentials.AnonymousCredentials(), None)
         transport = transports.AssetServiceTransport(
             credentials_file="credentials.json",
             quota_project_id="octopus",
         )
-        load_creds.assert_called_once_with("credentials.json",
+        load_creds.assert_called_once_with(
+            "credentials.json",
             scopes=None,
-            default_scopes=(
-            'https://www.googleapis.com/auth/cloud-platform',
-),
+            default_scopes=("https://www.googleapis.com/auth/cloud-platform",),
             quota_project_id="octopus",
         )


 def test_asset_service_base_transport_with_adc():
     # Test the default credentials are used if credentials and credentials_file are None.
-    with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.asset_v1.services.asset_service.transports.AssetServiceTransport._prep_wrapped_messages') as Transport:
+    with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch(
+        "google.cloud.asset_v1.services.asset_service.transports.AssetServiceTransport._prep_wrapped_messages"
+    ) as Transport:
         Transport.return_value = None
         adc.return_value = (ga_credentials.AnonymousCredentials(), None)
         transport = transports.AssetServiceTransport()
@@ -17615,14 +19682,12 @@ def test_asset_service_base_transport_with_adc():


 def test_asset_service_auth_adc():
     # If no credentials are provided, we should use ADC credentials.
-    with mock.patch.object(google.auth, 'default', autospec=True) as adc:
+    with mock.patch.object(google.auth, "default", autospec=True) as adc:
         adc.return_value = (ga_credentials.AnonymousCredentials(), None)
         AssetServiceClient()
         adc.assert_called_once_with(
             scopes=None,
-            default_scopes=(
-            'https://www.googleapis.com/auth/cloud-platform',
-),
+            default_scopes=("https://www.googleapis.com/auth/cloud-platform",),
             quota_project_id=None,
         )
@@ -17637,12 +19702,12 @@ def test_asset_service_auth_adc():
 def test_asset_service_transport_auth_adc(transport_class):
     # If credentials and host are not provided, the transport class should use
     # ADC credentials.
-    with mock.patch.object(google.auth, 'default', autospec=True) as adc:
+    with mock.patch.object(google.auth, "default", autospec=True) as adc:
         adc.return_value = (ga_credentials.AnonymousCredentials(), None)
         transport_class(quota_project_id="octopus", scopes=["1", "2"])
         adc.assert_called_once_with(
             scopes=["1", "2"],
-            default_scopes=( 'https://www.googleapis.com/auth/cloud-platform',),
+            default_scopes=("https://www.googleapis.com/auth/cloud-platform",),
             quota_project_id="octopus",
         )
@@ -17656,48 +19721,45 @@
 def test_asset_service_transport_auth_gdch_credentials(transport_class):
-    host = 'https://language.com'
-    api_audience_tests = [None, 'https://language2.com']
-    api_audience_expect = [host, 'https://language2.com']
+    host = "https://language.com"
+    api_audience_tests = [None, "https://language2.com"]
+    api_audience_expect = [host, "https://language2.com"]
     for t, e in zip(api_audience_tests, api_audience_expect):
-        with mock.patch.object(google.auth, 'default', autospec=True) as adc:
+        with mock.patch.object(google.auth, "default", autospec=True) as adc:
             gdch_mock = mock.MagicMock()
-            type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock)
+            type(gdch_mock).with_gdch_audience = mock.PropertyMock(
+                return_value=gdch_mock
+            )
             adc.return_value = (gdch_mock, None)
             transport_class(host=host, api_audience=t)
-            gdch_mock.with_gdch_audience.assert_called_once_with(
-                e
-            )
+            gdch_mock.with_gdch_audience.assert_called_once_with(e)


 @pytest.mark.parametrize(
     "transport_class,grpc_helpers",
     [
         (transports.AssetServiceGrpcTransport, grpc_helpers),
-        (transports.AssetServiceGrpcAsyncIOTransport, grpc_helpers_async)
+        (transports.AssetServiceGrpcAsyncIOTransport, grpc_helpers_async),
     ],
 )
 def test_asset_service_transport_create_channel(transport_class, grpc_helpers):
     # If credentials and host are not provided, the transport class should use
     # ADC credentials.
- with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( grpc_helpers, "create_channel", autospec=True ) as create_channel: creds = ga_credentials.AnonymousCredentials() adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] - ) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) create_channel.assert_called_with( "cloudasset.googleapis.com:443", credentials=creds, credentials_file=None, quota_project_id="octopus", - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), scopes=["1", "2"], default_host="cloudasset.googleapis.com", ssl_credentials=None, @@ -17708,10 +19770,11 @@ def test_asset_service_transport_create_channel(transport_class, grpc_helpers): ) -@pytest.mark.parametrize("transport_class", [transports.AssetServiceGrpcTransport, transports.AssetServiceGrpcAsyncIOTransport]) -def test_asset_service_grpc_transport_client_cert_source_for_mtls( - transport_class -): +@pytest.mark.parametrize( + "transport_class", + [transports.AssetServiceGrpcTransport, transports.AssetServiceGrpcAsyncIOTransport], +) +def test_asset_service_grpc_transport_client_cert_source_for_mtls(transport_class): cred = ga_credentials.AnonymousCredentials() # Check ssl_channel_credentials is used if provided. @@ -17720,7 +19783,7 @@ def test_asset_service_grpc_transport_client_cert_source_for_mtls( transport_class( host="squid.clam.whelk", credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds + ssl_channel_credentials=mock_ssl_channel_creds, ) mock_create_channel.assert_called_once_with( "squid.clam.whelk:443", @@ -17741,61 +19804,77 @@ def test_asset_service_grpc_transport_client_cert_source_for_mtls( with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: transport_class( credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback + client_cert_source_for_mtls=client_cert_source_callback, ) expected_cert, expected_key = client_cert_source_callback() mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, - private_key=expected_key + certificate_chain=expected_cert, private_key=expected_key ) + def test_asset_service_http_transport_client_cert_source_for_mtls(): cred = ga_credentials.AnonymousCredentials() - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: - transports.AssetServiceRestTransport ( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.AssetServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback ) mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) def test_asset_service_host_no_port(transport_name): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='cloudasset.googleapis.com'), - transport=transport_name, + client_options=client_options.ClientOptions( + 
api_endpoint="cloudasset.googleapis.com" + ), + transport=transport_name, ) assert client.transport._host == ( - 'cloudasset.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://cloudasset.googleapis.com' + "cloudasset.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://cloudasset.googleapis.com" ) -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) def test_asset_service_host_with_port(transport_name): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='cloudasset.googleapis.com:8000'), + client_options=client_options.ClientOptions( + api_endpoint="cloudasset.googleapis.com:8000" + ), transport=transport_name, ) assert client.transport._host == ( - 'cloudasset.googleapis.com:8000' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://cloudasset.googleapis.com:8000' + "cloudasset.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://cloudasset.googleapis.com:8000" ) -@pytest.mark.parametrize("transport_name", [ - "rest", -]) + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) def test_asset_service_client_transport_session_collision(transport_name): creds1 = ga_credentials.AnonymousCredentials() creds2 = ga_credentials.AnonymousCredentials() @@ -17876,8 +19955,10 @@ def test_asset_service_client_transport_session_collision(transport_name): session1 = client1.transport.analyze_org_policy_governed_assets._session session2 = client2.transport.analyze_org_policy_governed_assets._session assert session1 != session2 + + def test_asset_service_grpc_transport_channel(): - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) # Check that channel is used if provided. transport = transports.AssetServiceGrpcTransport( @@ -17890,7 +19971,7 @@ def test_asset_service_grpc_transport_channel(): def test_asset_service_grpc_asyncio_transport_channel(): - channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) # Check that channel is used if provided. transport = transports.AssetServiceGrpcAsyncIOTransport( @@ -17905,12 +19986,17 @@ def test_asset_service_grpc_asyncio_transport_channel(): # Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are # removed from grpc/grpc_asyncio transport constructor. 
@pytest.mark.filterwarnings("ignore::FutureWarning") -@pytest.mark.parametrize("transport_class", [transports.AssetServiceGrpcTransport, transports.AssetServiceGrpcAsyncIOTransport]) -def test_asset_service_transport_channel_mtls_with_client_cert_source( - transport_class -): - with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: +@pytest.mark.parametrize( + "transport_class", + [transports.AssetServiceGrpcTransport, transports.AssetServiceGrpcAsyncIOTransport], +) +def test_asset_service_transport_channel_mtls_with_client_cert_source(transport_class): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: mock_ssl_cred = mock.Mock() grpc_ssl_channel_cred.return_value = mock_ssl_cred @@ -17919,7 +20005,7 @@ def test_asset_service_transport_channel_mtls_with_client_cert_source( cred = ga_credentials.AnonymousCredentials() with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, 'default') as adc: + with mock.patch.object(google.auth, "default") as adc: adc.return_value = (cred, None) transport = transport_class( host="squid.clam.whelk", @@ -17949,17 +20035,20 @@ def test_asset_service_transport_channel_mtls_with_client_cert_source( # Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are # removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize("transport_class", [transports.AssetServiceGrpcTransport, transports.AssetServiceGrpcAsyncIOTransport]) -def test_asset_service_transport_channel_mtls_with_adc( - transport_class -): +@pytest.mark.parametrize( + "transport_class", + [transports.AssetServiceGrpcTransport, transports.AssetServiceGrpcAsyncIOTransport], +) +def test_asset_service_transport_channel_mtls_with_adc(transport_class): mock_ssl_cred = mock.Mock() with mock.patch.multiple( "google.auth.transport.grpc.SslCredentials", __init__=mock.Mock(return_value=None), ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), ): - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: mock_grpc_channel = mock.Mock() grpc_create_channel.return_value = mock_grpc_channel mock_cred = mock.Mock() @@ -17990,7 +20079,7 @@ def test_asset_service_transport_channel_mtls_with_adc( def test_asset_service_grpc_lro_client(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) transport = client.transport @@ -18007,7 +20096,7 @@ def test_asset_service_grpc_lro_client(): def test_asset_service_grpc_lro_async_client(): client = AssetServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc_asyncio', + transport="grpc_asyncio", ) transport = client.transport @@ -18024,7 +20113,10 @@ def test_asset_service_grpc_lro_async_client(): def test_access_level_path(): access_policy = "squid" access_level = "clam" - expected = "accessPolicies/{access_policy}/accessLevels/{access_level}".format(access_policy=access_policy, access_level=access_level, ) + expected = "accessPolicies/{access_policy}/accessLevels/{access_level}".format( + access_policy=access_policy, + access_level=access_level, + ) actual = AssetServiceClient.access_level_path(access_policy, access_level) assert 
@@ -18040,9 +20132,12 @@
     actual = AssetServiceClient.parse_access_level_path(path)
     assert expected == actual

+
 def test_access_policy_path():
     access_policy = "oyster"
-    expected = "accessPolicies/{access_policy}".format(access_policy=access_policy, )
+    expected = "accessPolicies/{access_policy}".format(
+        access_policy=access_policy,
+    )
     actual = AssetServiceClient.access_policy_path(access_policy)
     assert expected == actual
@@ -18057,6 +20152,7 @@
     actual = AssetServiceClient.parse_access_policy_path(path)
     assert expected == actual

+
 def test_asset_path():
     expected = "*".format()
     actual = AssetServiceClient.asset_path()
@@ -18064,18 +20160,21 @@
 def test_parse_asset_path():
-    expected = {
-    }
+    expected = {}
     path = AssetServiceClient.asset_path(**expected)

     # Check that the path construction is reversible.
     actual = AssetServiceClient.parse_asset_path(path)
     assert expected == actual

+
 def test_feed_path():
     project = "cuttlefish"
     feed = "mussel"
-    expected = "projects/{project}/feeds/{feed}".format(project=project, feed=feed, )
+    expected = "projects/{project}/feeds/{feed}".format(
+        project=project,
+        feed=feed,
+    )
     actual = AssetServiceClient.feed_path(project, feed)
     assert expected == actual
@@ -18091,11 +20190,18 @@
     actual = AssetServiceClient.parse_feed_path(path)
     assert expected == actual

+
 def test_inventory_path():
     project = "scallop"
     location = "abalone"
     instance = "squid"
-    expected = "projects/{project}/locations/{location}/instances/{instance}/inventory".format(project=project, location=location, instance=instance, )
+    expected = (
+        "projects/{project}/locations/{location}/instances/{instance}/inventory".format(
+            project=project,
+            location=location,
+            instance=instance,
+        )
+    )
     actual = AssetServiceClient.inventory_path(project, location, instance)
     assert expected == actual
@@ -18112,10 +20218,14 @@
     actual = AssetServiceClient.parse_inventory_path(path)
     assert expected == actual

+
 def test_saved_query_path():
     project = "oyster"
     saved_query = "nudibranch"
-    expected = "projects/{project}/savedQueries/{saved_query}".format(project=project, saved_query=saved_query, )
+    expected = "projects/{project}/savedQueries/{saved_query}".format(
+        project=project,
+        saved_query=saved_query,
+    )
     actual = AssetServiceClient.saved_query_path(project, saved_query)
     assert expected == actual
@@ -18131,10 +20241,16 @@
     actual = AssetServiceClient.parse_saved_query_path(path)
     assert expected == actual

+
 def test_service_perimeter_path():
     access_policy = "winkle"
     service_perimeter = "nautilus"
-    expected = "accessPolicies/{access_policy}/servicePerimeters/{service_perimeter}".format(access_policy=access_policy, service_perimeter=service_perimeter, )
+    expected = (
+        "accessPolicies/{access_policy}/servicePerimeters/{service_perimeter}".format(
+            access_policy=access_policy,
+            service_perimeter=service_perimeter,
+        )
+    )
     actual = AssetServiceClient.service_perimeter_path(access_policy, service_perimeter)
     assert expected == actual
@@ -18150,9 +20266,12 @@
     actual = AssetServiceClient.parse_service_perimeter_path(path)
     assert expected == actual

+
 def test_common_billing_account_path():
     billing_account = "squid"
-    expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, )
+    expected = "billingAccounts/{billing_account}".format(
+        billing_account=billing_account,
+    )
     actual = AssetServiceClient.common_billing_account_path(billing_account)
     assert expected == actual
@@ -18167,9 +20286,12 @@
     actual = AssetServiceClient.parse_common_billing_account_path(path)
     assert expected == actual

+
 def test_common_folder_path():
     folder = "whelk"
-    expected = "folders/{folder}".format(folder=folder, )
+    expected = "folders/{folder}".format(
+        folder=folder,
+    )
     actual = AssetServiceClient.common_folder_path(folder)
     assert expected == actual
@@ -18184,9 +20306,12 @@
     actual = AssetServiceClient.parse_common_folder_path(path)
     assert expected == actual

+
 def test_common_organization_path():
     organization = "oyster"
-    expected = "organizations/{organization}".format(organization=organization, )
+    expected = "organizations/{organization}".format(
+        organization=organization,
+    )
     actual = AssetServiceClient.common_organization_path(organization)
     assert expected == actual
@@ -18201,9 +20326,12 @@
     actual = AssetServiceClient.parse_common_organization_path(path)
     assert expected == actual

+
 def test_common_project_path():
     project = "cuttlefish"
-    expected = "projects/{project}".format(project=project, )
+    expected = "projects/{project}".format(
+        project=project,
+    )
     actual = AssetServiceClient.common_project_path(project)
     assert expected == actual
@@ -18218,10 +20346,14 @@
     actual = AssetServiceClient.parse_common_project_path(path)
     assert expected == actual

+
 def test_common_location_path():
     project = "winkle"
     location = "nautilus"
-    expected = "projects/{project}/locations/{location}".format(project=project, location=location, )
+    expected = "projects/{project}/locations/{location}".format(
+        project=project,
+        location=location,
+    )
     actual = AssetServiceClient.common_location_path(project, location)
     assert expected == actual
@@ -18241,14 +20373,18 @@ def test_parse_common_location_path():

 def test_client_with_default_client_info():
     client_info = gapic_v1.client_info.ClientInfo()
-    with mock.patch.object(transports.AssetServiceTransport, '_prep_wrapped_messages') as prep:
+    with mock.patch.object(
+        transports.AssetServiceTransport, "_prep_wrapped_messages"
+    ) as prep:
         client = AssetServiceClient(
             credentials=ga_credentials.AnonymousCredentials(),
             client_info=client_info,
         )
         prep.assert_called_once_with(client_info)

-    with mock.patch.object(transports.AssetServiceTransport, '_prep_wrapped_messages') as prep:
+    with mock.patch.object(
+        transports.AssetServiceTransport, "_prep_wrapped_messages"
+    ) as prep:
         transport_class = AssetServiceClient.get_transport_class()
         transport = transport_class(
             credentials=ga_credentials.AnonymousCredentials(),
@@ -18259,7 +20395,8 @@ def test_client_with_default_client_info():
 def test_get_operation(transport: str = "grpc"):
     client = AssetServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
     )

     # Everything is optional in proto3 as far as the runtime is concerned,
@@ -18278,10 +20415,13 @@ def test_get_operation(transport: str = "grpc"):

     # Establish that the response is the type that we expect.
     assert isinstance(response, operations_pb2.Operation)
+
+
 @pytest.mark.asyncio
 async def test_get_operation_async(transport: str = "grpc_asyncio"):
     client = AssetServiceAsyncClient(
-        credentials=async_anonymous_credentials(), transport=transport,
+        credentials=async_anonymous_credentials(),
+        transport=transport,
     )

     # Everything is optional in proto3 as far as the runtime is concerned,
@@ -18303,6 +20443,7 @@ async def test_get_operation_async(transport: str = "grpc_asyncio"):
     # Establish that the response is the type that we expect.
     assert isinstance(response, operations_pb2.Operation)

+
 def test_get_operation_field_headers():
     client = AssetServiceClient(
         credentials=ga_credentials.AnonymousCredentials(),
@@ -18325,7 +20466,12 @@ def test_get_operation_field_headers():

         # Establish that the field header was sent.
         _, _, kw = call.mock_calls[0]
-        assert ("x-goog-request-params", "name=locations",) in kw["metadata"]
+        assert (
+            "x-goog-request-params",
+            "name=locations",
+        ) in kw["metadata"]
+
+
 @pytest.mark.asyncio
 async def test_get_operation_field_headers_async():
     client = AssetServiceAsyncClient(
@@ -18350,7 +20496,11 @@ async def test_get_operation_field_headers_async():

         # Establish that the field header was sent.
         _, _, kw = call.mock_calls[0]
-        assert ("x-goog-request-params", "name=locations",) in kw["metadata"]
+        assert (
+            "x-goog-request-params",
+            "name=locations",
+        ) in kw["metadata"]
+

 def test_get_operation_from_dict():
     client = AssetServiceClient(
@@ -18367,6 +20517,8 @@ def test_get_operation_from_dict():
             }
         )
         call.assert_called()
+
+
 @pytest.mark.asyncio
 async def test_get_operation_from_dict_async():
     client = AssetServiceAsyncClient(
@@ -18388,10 +20540,11 @@ async def test_get_operation_from_dict_async():

 def test_transport_close_grpc():
     client = AssetServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="grpc"
+        credentials=ga_credentials.AnonymousCredentials(), transport="grpc"
     )
-    with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close:
+    with mock.patch.object(
+        type(getattr(client.transport, "_grpc_channel")), "close"
+    ) as close:
         with client:
             close.assert_not_called()
         close.assert_called_once()
@@ -18400,10 +20553,11 @@ def test_transport_close_grpc():
 @pytest.mark.asyncio
 async def test_transport_close_grpc_asyncio():
     client = AssetServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport="grpc_asyncio"
+        credentials=async_anonymous_credentials(), transport="grpc_asyncio"
     )
-    with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close:
+    with mock.patch.object(
+        type(getattr(client.transport, "_grpc_channel")), "close"
+    ) as close:
         async with client:
             close.assert_not_called()
         close.assert_called_once()
@@ -18411,10 +20565,11 @@ async def test_transport_close_grpc_asyncio():

 def test_transport_close_rest():
     client = AssetServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
+        credentials=ga_credentials.AnonymousCredentials(), transport="rest"
     )
-    with mock.patch.object(type(getattr(client.transport, "_session")), "close") as close:
+    with mock.patch.object(
+        type(getattr(client.transport, "_session")), "close"
+    ) as close:
         with client:
             close.assert_not_called()
         close.assert_called_once()
@@ -18422,13 +20577,12 @@ def test_transport_close_rest():

 def test_client_ctx():
     transports = [
-        'rest',
-        'grpc',
+        "rest",
+        "grpc",
     ]
     for transport in transports:
         client = AssetServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport=transport
+            credentials=ga_credentials.AnonymousCredentials(), transport=transport
         )
         # Test client calls underlying transport.
         with mock.patch.object(type(client.transport), "close") as close:
@@ -18437,10 +20591,14 @@ def test_client_ctx():
                 pass
             close.assert_called()

-@pytest.mark.parametrize("client_class,transport_class", [
-    (AssetServiceClient, transports.AssetServiceGrpcTransport),
-    (AssetServiceAsyncClient, transports.AssetServiceGrpcAsyncIOTransport),
-])
+
+@pytest.mark.parametrize(
+    "client_class,transport_class",
+    [
+        (AssetServiceClient, transports.AssetServiceGrpcTransport),
+        (AssetServiceAsyncClient, transports.AssetServiceGrpcAsyncIOTransport),
+    ],
+)
 def test_api_key_credentials(client_class, transport_class):
     with mock.patch.object(
         google.auth._default, "get_api_key_credentials", create=True
@@ -18455,7 +20613,9 @@ def test_api_key_credentials(client_class, transport_class):
         patched.assert_called_once_with(
             credentials=mock_cred,
             credentials_file=None,
-            host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
+            host=client._DEFAULT_ENDPOINT_TEMPLATE.format(
+                UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE
+            ),
             scopes=None,
             client_cert_source_for_mtls=None,
             quota_project_id=None,