From 8d18441e3b0ef88859b963ba9f08de1ef445c2c2 Mon Sep 17 00:00:00 2001 From: Ben Karl Date: Mon, 4 Apr 2022 13:18:49 -0400 Subject: [PATCH] Changes for march release. (#612) --- ChangeLog | 11 + .../add_performance_max_campaign.py | 148 ++--- examples/basic_operations/add_campaigns.py | 10 +- .../cloud_logging_interceptor.py | 228 +++++++ .../get_campaigns.py | 83 +++ .../requirements.txt | 1 + ...gate_search_result_pages_caching_tokens.py | 213 +++++++ .../add_dynamic_remarketing_asset.py | 5 +- .../remarketing/upload_offline_conversion.py | 73 ++- ...formance_max_product_listing_group_tree.py | 559 ++++++++++++++++++ google/ads/googleads/__init__.py | 2 +- google/ads/googleads/interceptors/__init__.py | 4 +- google/ads/googleads/interceptors/helpers.py | 170 ++++++ .../interceptors/logging_interceptor.py | 254 ++------ .../interceptors/response_wrappers.py | 15 +- setup.py | 18 +- .../interceptors/logging_interceptor_test.py | 74 ++- 17 files changed, 1555 insertions(+), 313 deletions(-) create mode 100644 examples/custom_logging_interceptor/cloud_logging_interceptor.py create mode 100755 examples/custom_logging_interceptor/get_campaigns.py create mode 100644 examples/custom_logging_interceptor/requirements.txt create mode 100755 examples/misc/navigate_search_result_pages_caching_tokens.py create mode 100644 examples/shopping_ads/add_performance_max_product_listing_group_tree.py create mode 100644 google/ads/googleads/interceptors/helpers.py diff --git a/ChangeLog b/ChangeLog index a7fd27e3a..4cde55b8f 100644 --- a/ChangeLog +++ b/ChangeLog @@ -1,3 +1,14 @@ +* 15.1.0 +- Fix issue preventing streaming responses from being logged. +- Expose logging utilities for external interceptor logic. +- Various example updates and improvements. + +* 15.0.0 +- Google Ads API v10_0 release. +- Remove all "get" methods from services. +- Remove services that only contained "get" methods. +- Remove support got Google Ads API v7_0. + * 14.1.0 - Google Ads API v9_0 release - Update gRPC transport logic to replace "gapic" user-agent with "gccl" diff --git a/examples/advanced_operations/add_performance_max_campaign.py b/examples/advanced_operations/add_performance_max_campaign.py index d617f9ef4..6841fb100 100644 --- a/examples/advanced_operations/add_performance_max_campaign.py +++ b/examples/advanced_operations/add_performance_max_campaign.py @@ -57,15 +57,13 @@ # [START add_performance_max_campaign] -def main( - client, - customer_id, -): +def main(client, customer_id, audience_id): """The main method that creates all necessary entities for the example. Args: client: an initialized GoogleAdsClient instance. customer_id: a client customer ID. + audience_id: an optional audience ID. """ # [START add_performance_max_campaign_1] googleads_service = client.get_service("GoogleAdsService") @@ -77,22 +75,11 @@ def main( # # Create the headlines. headline_asset_resource_names = _create_multiple_text_assets( - client, - customer_id, - [ - "Travel", - "Travel Reviews", - "Book travel", - ], + client, customer_id, ["Travel", "Travel Reviews", "Book travel",], ) # Create the descriptions. description_asset_resource_names = _create_multiple_text_assets( - client, - customer_id, - [ - "Take to the air!", - "Fly to the sky!", - ], + client, customer_id, ["Take to the air!", "Fly to the sky!",], ) # The below methods create and return MutateOperations that we later @@ -103,18 +90,13 @@ def main( # successfully or fail entirely, leaving no orphaned entities. 
See: # https://developers.google.com/google-ads/api/docs/mutating/overview campaign_budget_operation = _create_campaign_budget_operation( - client, - customer_id, + client, customer_id, ) - performance_max_campaign_operation = ( - _create_performance_max_campaign_operation( - client, - customer_id, - ) + performance_max_campaign_operation = _create_performance_max_campaign_operation( + client, customer_id, ) campaign_criterion_operations = _create_campaign_criterion_operations( - client, - customer_id, + client, customer_id, ) asset_group_operations = _create_asset_group_operation( client, @@ -123,28 +105,37 @@ def main( description_asset_resource_names, ) + mutate_operations = [ + # It's important to create these entities in this order because + # they depend on each other. + campaign_budget_operation, + performance_max_campaign_operation, + # Expand the list of multiple operations into the list of + # other mutate operations + *campaign_criterion_operations, + *asset_group_operations, + ] + + # Append an asset group signal operation is an audience ID is given. + if audience_id: + mutate_operations.append( + _create_asset_group_signal_operation( + client, customer_id, audience_id + ) + ) + # Send the operations in a single Mutate request. response = googleads_service.mutate( - customer_id=customer_id, - mutate_operations=[ - # It's important to create these entities in this order because - # they depend on each other. - campaign_budget_operation, - performance_max_campaign_operation, - # Expand the list of multiple operations into the list of - # other mutate operations - *campaign_criterion_operations, - *asset_group_operations, - ], + customer_id=customer_id, mutate_operations=mutate_operations ) + _print_response_details(response) # [END add_performance_max_campaign_1] # [START add_performance_max_campaign_2] def _create_campaign_budget_operation( - client, - customer_id, + client, customer_id, ): """Creates a MutateOperation that creates a new CampaignBudget. @@ -182,8 +173,7 @@ def _create_campaign_budget_operation( # [START add_performance_max_campaign_3] def _create_performance_max_campaign_operation( - client, - customer_id, + client, customer_id, ): """Creates a MutateOperation that creates a new Performance Max campaign. @@ -252,8 +242,7 @@ def _create_performance_max_campaign_operation( # [START add_performance_max_campaign_4] def _create_campaign_criterion_operations( - client, - customer_id, + client, customer_id, ): """Creates a list of MutateOperations that create new campaign criteria. 
@@ -284,8 +273,8 @@ def _create_campaign_criterion_operations( campaign_criterion.campaign = campaign_service.campaign_path( customer_id, _PERFORMANCE_MAX_CAMPAIGN_TEMPORARY_ID ) - campaign_criterion.location.geo_target_constant = ( - geo_target_constant_service.geo_target_constant_path("1023191") + campaign_criterion.location.geo_target_constant = geo_target_constant_service.geo_target_constant_path( + "1023191" ) campaign_criterion.negative = False operations.append(mutate_operation) @@ -296,8 +285,8 @@ def _create_campaign_criterion_operations( campaign_criterion.campaign = campaign_service.campaign_path( customer_id, _PERFORMANCE_MAX_CAMPAIGN_TEMPORARY_ID ) - campaign_criterion.location.geo_target_constant = ( - geo_target_constant_service.geo_target_constant_path("1022762") + campaign_criterion.location.geo_target_constant = geo_target_constant_service.geo_target_constant_path( + "1022762" ) campaign_criterion.negative = True operations.append(mutate_operation) @@ -311,9 +300,9 @@ def _create_campaign_criterion_operations( # Set the language. # For a list of all language codes, see: # https://developers.google.com/google-ads/api/reference/data/codes-formats#expandable-7 - campaign_criterion.language.language_constant = ( - googleads_service.language_constant_path("1000") # English - ) + campaign_criterion.language.language_constant = googleads_service.language_constant_path( + "1000" + ) # English operations.append(mutate_operation) return operations @@ -346,8 +335,7 @@ def _create_multiple_text_assets(client, customer_id, texts): # Send the operations in a single Mutate request. response = googleads_service.mutate( - customer_id=customer_id, - mutate_operations=operations, + customer_id=customer_id, mutate_operations=operations, ) asset_resource_names = [] for result in response.mutate_operation_responses: @@ -395,8 +383,7 @@ def _create_asset_group_operation( asset_group.final_mobile_urls.append("http://www.example.com") asset_group.status = client.enums.AssetGroupStatusEnum.PAUSED asset_group.resource_name = asset_group_service.asset_group_path( - customer_id, - _ASSET_GROUP_TEMPORARY_ID, + customer_id, _ASSET_GROUP_TEMPORARY_ID, ) operations.append(mutate_operation) @@ -420,8 +407,7 @@ def _create_asset_group_operation( asset_group_asset = mutate_operation.asset_group_asset_operation.create asset_group_asset.field_type = client.enums.AssetFieldTypeEnum.HEADLINE asset_group_asset.asset_group = asset_group_service.asset_group_path( - customer_id, - _ASSET_GROUP_TEMPORARY_ID, + customer_id, _ASSET_GROUP_TEMPORARY_ID, ) asset_group_asset.asset = resource_name operations.append(mutate_operation) @@ -434,8 +420,7 @@ def _create_asset_group_operation( client.enums.AssetFieldTypeEnum.DESCRIPTION ) asset_group_asset.asset_group = asset_group_service.asset_group_path( - customer_id, - _ASSET_GROUP_TEMPORARY_ID, + customer_id, _ASSET_GROUP_TEMPORARY_ID, ) asset_group_asset.asset = resource_name operations.append(mutate_operation) @@ -523,8 +508,7 @@ def _create_and_link_text_asset(client, customer_id, text, field_type): asset_group_asset = mutate_operation.asset_group_asset_operation.create asset_group_asset.field_type = field_type asset_group_asset.asset_group = asset_group_service.asset_group_path( - customer_id, - _ASSET_GROUP_TEMPORARY_ID, + customer_id, _ASSET_GROUP_TEMPORARY_ID, ) asset_group_asset.asset = asset_service.asset_path( customer_id, next_temp_id @@ -574,8 +558,7 @@ def _create_and_link_image_asset( asset_group_asset = mutate_operation.asset_group_asset_operation.create 
asset_group_asset.field_type = field_type asset_group_asset.asset_group = asset_group_service.asset_group_path( - customer_id, - _ASSET_GROUP_TEMPORARY_ID, + customer_id, _ASSET_GROUP_TEMPORARY_ID, ) asset_group_asset.asset = asset_service.asset_path( customer_id, next_temp_id @@ -624,6 +607,39 @@ def _print_response_details(response): ) +# [START add_performance_max_campaign_9] +def _create_asset_group_signal_operation(client, customer_id, audience_id): + """Creates a list of MutateOperations that may create asset group signals. + + Args: + client: an initialized GoogleAdsClient instance. + customer_id: a client customer ID. + audience_id: an optional audience ID. + + Returns: + MutateOperations that create new asset group signals. + """ + if not audience_id: + return None + + googleads_service = client.get_service("GoogleAdsService") + asset_group_resource_name = googleads_service.asset_group_path( + customer_id, _ASSET_GROUP_TEMPORARY_ID + ) + + mutate_operation = client.get_type("MutateOperation") + operation = mutate_operation.asset_group_signal_operation.create + # To learn more about Audience Signals, see: + # https://developers.google.com/google-ads/api/docs/performance-max/asset-groups#audience_signals + operation.asset_group = asset_group_resource_name + operation.audience.audience = googleads_service.audience_path( + customer_id, audience_id + ) + + return mutate_operation + # [END add_performance_max_campaign_9] + + # [END add_performance_max_campaign] if __name__ == "__main__": @@ -642,14 +658,14 @@ def _print_response_details(response): required=True, help="The Google Ads customer ID.", ) + parser.add_argument( + "-a", "--audience_id", type=str, help="The ID of an audience.", + ) args = parser.parse_args() try: - main( - googleads_client, - args.customer_id, - ) + main(googleads_client, args.customer_id, args.audience_id) except GoogleAdsException as ex: print( f'Request with ID "{ex.request_id}" failed with status ' diff --git a/examples/basic_operations/add_campaigns.py b/examples/basic_operations/add_campaigns.py index c0c92d341..8886b423c 100755 --- a/examples/basic_operations/add_campaigns.py +++ b/examples/basic_operations/add_campaigns.py @@ -46,10 +46,8 @@ def main(client, customer_id): # Add budget. try: - campaign_budget_response = ( - campaign_budget_service.mutate_campaign_budgets( - customer_id=customer_id, operations=[campaign_budget_operation] - ) + campaign_budget_response = campaign_budget_service.mutate_campaign_budgets( + customer_id=customer_id, operations=[campaign_budget_operation] ) except GoogleAdsException as ex: _handle_googleads_exception(ex) @@ -76,8 +74,10 @@ def main(client, customer_id): # Set the campaign network options. campaign.network_settings.target_google_search = True campaign.network_settings.target_search_network = True - campaign.network_settings.target_content_network = False campaign.network_settings.target_partner_search_network = False + # Enable Display Expansion on Search campaigns. For more details see: + # https://support.google.com/google-ads/answer/7193800 + campaign.network_settings.target_content_network = True # [END add_campaigns_1] # Optional: Set the start date. 
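The add_performance_max_campaign.py changes above make the audience ID an optional input: when the new -a/--audience_id flag is supplied, the example appends an asset group signal operation to the same mutate request. A hypothetical invocation, using placeholder customer and audience ID values rather than real ones:

    python examples/advanced_operations/add_performance_max_campaign.py -c 1234567890 -a 9876543210
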
diff --git a/examples/custom_logging_interceptor/cloud_logging_interceptor.py b/examples/custom_logging_interceptor/cloud_logging_interceptor.py new file mode 100644 index 000000000..6aae721b6 --- /dev/null +++ b/examples/custom_logging_interceptor/cloud_logging_interceptor.py @@ -0,0 +1,228 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""A custom gRPC Interceptor that logs requests and responses to Cloud Logging. + +The custom interceptor object is passed into the get_service method of the +GoogleAdsClient. It intercepts requests and responses, parses them into a +human readable structure and logs them using the logging service instantiated +within the class (in this case, a Cloud Logging client). +""" + +import logging +import time + +from google.cloud import logging +from grpc import UnaryUnaryClientInterceptor, UnaryStreamClientInterceptor + +from google.ads.googleads.interceptors import LoggingInterceptor, mask_message + + +class CloudLoggingInterceptor(LoggingInterceptor): + """An interceptor that logs rpc request and response details to Google Cloud Logging. + + This class inherits logic from the LoggingInterceptor, which simplifies the + implementation here. Some logic is required here in order to make the + underlying logic work -- comments make note of this where applicable. + NOTE: Inheriting from the LoggingInterceptor class could yield unexpected side + effects. For example, if the LoggingInterceptor class is updated, this class would + inherit the updated logic, which could affect its functionality. One option to avoid + this is to inherit from the Interceptor class instead, and selectively copy whatever + logic is needed from the LoggingInterceptor class.""" + + def __init__(self, api_version): + """Initializer for the CloudLoggingInterceptor. + + Args: + api_version: a str of the API version of the request. + """ + super().__init__(logger=None, api_version=api_version) + # Instantiate the Cloud Logging client. + logging_client = logging.Client() + self.logger = logging_client.logger("cloud_logging") + + def log_successful_request( + self, + method, + customer_id, + metadata_json, + request_id, + request, + trailing_metadata_json, + response, + ): + """Handles logging of a successful request. + + Args: + method: The method of the request. + customer_id: The customer ID associated with the request. + metadata_json: A JSON str of initial_metadata. + request_id: A unique ID for the request provided in the response. + request: An instance of a request proto message. + trailing_metadata_json: A JSON str of trailing_metadata. + response: A grpc.Call/grpc.Future instance. + """ + # Retrieve and mask the RPC result from the response future. + # This method is available from the LoggingInterceptor class. + # Ensure self._cache is set in order for this to work. + # The response result could contain up to 10,000 rows of data, + # so consider truncating this value before logging it, to save + # on data storage costs and maintain readability. 
+ result = self.retrieve_and_mask_result(response) + + # elapsed_ms is the approximate elapsed time of the RPC, in milliseconds. + # There are different ways to define and measure elapsed time, so use + # whatever approach makes sense for your monitoring purposes. + # rpc_start and rpc_end are set in the intercept_unary_* methods below. + elapsed_ms = (self.rpc_end - self.rpc_start) * 1000 + + debug_log = { + "method": method, + "host": metadata_json, + "request_id": request_id, + "request": str(request), + "headers": trailing_metadata_json, + "response": str(result), + "is_fault": False, + "elapsed_ms": elapsed_ms, + } + self.logger.log_struct(debug_log, severity="DEBUG") + + info_log = { + "customer_id": customer_id, + "method": method, + "request_id": request_id, + "is_fault": False, + # Available from the Interceptor class. + "api_version": self._api_version, + } + self.logger.log_struct(info_log, severity="INFO") + + def log_failed_request( + self, + method, + customer_id, + metadata_json, + request_id, + request, + trailing_metadata_json, + response, + ): + """Handles logging of a failed request. + + Args: + method: The method of the request. + customer_id: The customer ID associated with the request. + metadata_json: A JSON str of initial_metadata. + request_id: A unique ID for the request provided in the response. + request: An instance of a request proto message. + trailing_metadata_json: A JSON str of trailing_metadata. + response: A JSON str of the response message. + """ + exception = self._get_error_from_response(response) + exception_str = self._parse_exception_to_str(exception) + fault_message = self._get_fault_message(exception) + + info_log = { + "method": method, + "endpoint": self.endpoint, + "host": metadata_json, + "request_id": request_id, + "request": str(request), + "headers": trailing_metadata_json, + "exception": exception_str, + "is_fault": True, + } + self.logger.log_struct(info_log, severity="INFO") + + error_log = { + "method": method, + "endpoint": self.endpoint, + "request_id": request_id, + "customer_id": customer_id, + "is_fault": True, + "fault_message": fault_message, + } + self.logger.log_struct(error_log, severity="ERROR") + + def intercept_unary_unary(self, continuation, client_call_details, request): + """Intercepts and logs API interactions. + + Overrides abstract method defined in grpc.UnaryUnaryClientInterceptor. + + Args: + continuation: a function to continue the request process. + client_call_details: a grpc._interceptor._ClientCallDetails + instance containing request metadata. + request: a SearchGoogleAdsRequest or SearchGoogleAdsStreamRequest + message class instance. + + Returns: + A grpc.Call/grpc.Future instance representing a service response. + """ + # Set the rpc_end value to current time when RPC completes. + def update_rpc_end(response_future): + self.rpc_end = time.perf_counter() + + # Capture precise clock time to later calculate approximate elapsed + # time of the RPC. + self.rpc_start = time.perf_counter() + + # The below call is REQUIRED. + response = continuation(client_call_details, request) + + response.add_done_callback(update_rpc_end) + + self.log_request(client_call_details, request, response) + + # The below return is REQUIRED. + return response + + def intercept_unary_stream( + self, continuation, client_call_details, request + ): + """Intercepts and logs API interactions for Unary-Stream requests. + + Overrides abstract method defined in grpc.UnaryStreamClientInterceptor. 
+ + Args: + continuation: a function to continue the request process. + client_call_details: a grpc._interceptor._ClientCallDetails + instance containing request metadata. + request: a SearchGoogleAdsRequest or SearchGoogleAdsStreamRequest + message class instance. + + Returns: + A grpc.Call/grpc.Future instance representing a service response. + """ + + def on_rpc_complete(response_future): + self.rpc_end = time.perf_counter() + self.log_request(client_call_details, request, response_future) + + # Capture precise clock time to later calculate approximate elapsed + # time of the RPC. + self.rpc_start = time.perf_counter() + + # The below call is REQUIRED. + response = continuation(client_call_details, request) + + # Set self._cache to the cache on the response wrapper in order to + # access the streaming logs. This is REQUIRED in order to log streaming + # requests. + self._cache = response.get_cache() + + response.add_done_callback(on_rpc_complete) + + # The below return is REQUIRED. + return response diff --git a/examples/custom_logging_interceptor/get_campaigns.py b/examples/custom_logging_interceptor/get_campaigns.py new file mode 100755 index 000000000..0af97ed12 --- /dev/null +++ b/examples/custom_logging_interceptor/get_campaigns.py @@ -0,0 +1,83 @@ +#!/usr/bin/env python +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""This example illustrates how to get all campaigns and log details to +Google Cloud Logging using a custom logging interceptor. +""" + + +import argparse +import sys + +from google.ads.googleads.client import GoogleAdsClient +from google.ads.googleads.errors import GoogleAdsException +from cloud_logging_interceptor import CloudLoggingInterceptor + + +_API_VERSION = "v10" + +def main(client, customer_id): + # Instantiate the GoogleAdsService object with a custom interceptor. + ga_service = client.get_service("GoogleAdsService", interceptors=[CloudLoggingInterceptor(api_version=_API_VERSION)]) + + query = """ + SELECT + campaign.id, + campaign.name + FROM campaign + ORDER BY campaign.id + LIMIT 10""" + + # Issues a search request using streaming. + stream = ga_service.search_stream(customer_id=customer_id, query=query) + + for batch in stream: + for row in batch.results: + print( + f"Campaign with ID {row.campaign.id} and name " + f'"{row.campaign.name}" was found.' + ) + + +if __name__ == "__main__": + # GoogleAdsClient will read the google-ads.yaml configuration file in the + # home directory if none is specified. + googleads_client = GoogleAdsClient.load_from_storage(version=_API_VERSION) + + parser = argparse.ArgumentParser( + description="Lists all campaigns for specified customer." + ) + # The following argument(s) should be provided to run the example. 
+ parser.add_argument( + "-c", + "--customer_id", + type=str, + required=True, + help="The Google Ads customer ID.", + ) + args = parser.parse_args() + + try: + main(googleads_client, args.customer_id) + except GoogleAdsException as ex: + print( + f'Request with ID "{ex.request_id}" failed with status ' + f'"{ex.error.code().name}" and includes the following errors:' + ) + for error in ex.failure.errors: + print(f'\tError with message "{error.message}".') + if error.location: + for field_path_element in error.location.field_path_elements: + print(f"\t\tOn field: {field_path_element.field_name}") + sys.exit(1) diff --git a/examples/custom_logging_interceptor/requirements.txt b/examples/custom_logging_interceptor/requirements.txt new file mode 100644 index 000000000..24c752c95 --- /dev/null +++ b/examples/custom_logging_interceptor/requirements.txt @@ -0,0 +1 @@ +google-cloud-logging diff --git a/examples/misc/navigate_search_result_pages_caching_tokens.py b/examples/misc/navigate_search_result_pages_caching_tokens.py new file mode 100755 index 000000000..14b3d95ac --- /dev/null +++ b/examples/misc/navigate_search_result_pages_caching_tokens.py @@ -0,0 +1,213 @@ +#!/usr/bin/env python +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""This example illustrates how to cache page tokens in paged search requests. + +GoogleAdsService.Search results are paginated but they can only be retrieved in +sequence starting with the first page. More details at: +https://developers.google.com/google-ads/api/docs/reporting/paging. + +This example searches campaigns illustrating how GoogleAdsService.Search result +page tokens can be cached and reused to retrieve previous pages. This is useful +when you need to request pages that were already requested in the past without +starting over from the first page. For example, it can be used to implement an +interactive application that displays a page of results at a time without +caching all the results first. + +To add campaigns, run the basic_examples/add_campaigns.py example. +""" + + +import argparse +import math +import sys + +from google.ads.googleads.client import GoogleAdsClient +from google.ads.googleads.errors import GoogleAdsException + + +# The maximum number of results to retrieve in the query. +_RESULTS_LIMIT = 10 +# The number of results to return per page. +_PAGE_SIZE = 3 + + +def main(client, customer_id): + """The main method that creates all necessary entities for the example. + + Args: + client: an initialized GoogleAdsClient instance. + customer_id: a client customer ID. + """ + # The cache of page tokens which is stored in-memory with the page numbers + # as keys. The first page's token is always an empty string. + page_tokens = {1: ""} + + print("\n--- 0. Fetch page #1 to get metadata:\n") + + # Creates a query that retrieves the campaigns. 
+ query = f""" + SELECT + campaign.id, + campaign.name + FROM campaign + ORDER BY campaign.name + LIMIT {_RESULTS_LIMIT}""" + + request = client.get_type("SearchGoogleAdsRequest") + request.customer_id = customer_id + request.query = query + # Sets the number of results to return per page. + request.page_size = _PAGE_SIZE + # Requests to return the total results count. This is necessary to determine + # how many pages of results there are. + request.return_total_results_count = True + + googleads_service = client.get_service("GoogleAdsService") + response = googleads_service.search(request=request) + _cache_next_page_token(page_tokens, response, 1) + + # Determines the total number of results and prints it. The total results + # count does not take into consideration the LIMIT clause of the query so + # we need to find the minimal value between the limit and the total results + # count. + total_number_of_results = min(_RESULTS_LIMIT, response.total_results_count) + print(f"Total number of campaigns found: {total_number_of_results}.") + + # Determines the total number of pages and prints it. + total_number_of_pages = math.ceil(total_number_of_results / _PAGE_SIZE) + print(f"Total number of pages: {total_number_of_pages}.") + if not total_number_of_pages: + print("Could not find any campaigns.") + sys.exit(1) + + # Demonstrates how the logic works when iterating pages forward. We select + # a page that is in the middle of the result set so that only a subset of + # the page tokens will be cached. + middle_page_number = math.ceil(total_number_of_pages / 2) + print(f"\n--- 1. Print results of the page {middle_page_number}\n") + _fetch_and_print_results( + client, customer_id, query, middle_page_number, page_tokens + ) + + +# [START navigate_search_result_pages_caching_tokens] +def _fetch_and_print_results( + client, customer_id, query, page_number, page_tokens +): + """Fetches and prints the results of a page using a cache of page tokens. + + Args: + client: an initialized GoogleAdsClient instance. + customer_id: a client customer ID. + query: the search query. + page_number: the number of the page to fetch and print results for. + page_tokens: the cache of page tokens to use and update. + """ + current_page_number = None + # There is no need to fetch the pages we already know the page tokens for. + if page_tokens.get(page_number, None): + print( + "The token of the request page was cached, we will use it to get " + "the results." + ) + current_page_number = page_number + else: + count = len(page_tokens.keys()) + print( + "The token of the requested page was never cached, we will use " + f"the closest page we know the token for (page {count}) and " + "sequentially get pages from there." + ) + current_page_number = count + + googleads_service = client.get_service("GoogleAdsService") + # Fetches next pages in sequence and caches their tokens until the requested + # page results are returned. + while current_page_number <= page_number: + # Fetches the next page. + print(f"Fetching page {current_page_number}...") + request = client.get_type("SearchGoogleAdsRequest") + request.customer_id = customer_id + request.query = query + request.page_size = _PAGE_SIZE + request.return_total_results_count = True + # Uses the page token cached for the current page number. 
+ request.page_token = page_tokens[current_page_number] + + response = googleads_service.search(request=request) + _cache_next_page_token(page_tokens, response, current_page_number) + current_page_number += 1 + + # Prints the results of the requested page. + print(f"Printing results found for the page {page_number}.") + for row in response.results: + print( + f" - Campaign with ID {row.campaign.id} and name " + f"{row.campaign.name}." + ) + # [END navigate_search_result_pages_caching_tokens] + + +def _cache_next_page_token(page_tokens, page, page_number): + """Updates the cache of page tokens based on a page that was retrieved. + + Args: + page_tokens: a cache of page tokens to update. + page: the search results page that was retrieved. + page_number: the number of the page that was retrieved. + """ + # Check if the page_token exists and that it hasn't already been cached. + if page.next_page_token and not page_tokens.get(page_number, None): + page_tokens[page_number + 1] = page.next_page_token + print(f"Cached token for page #{page_number + 1}.") + + +if __name__ == "__main__": + # GoogleAdsClient will read the google-ads.yaml configuration file in the + # home directory if none is specified. + googleads_client = GoogleAdsClient.load_from_storage(version="v10") + + parser = argparse.ArgumentParser( + description=( + "Demonstrates how to cache and reuse page tokens in a " + "GoogleAdService.Search request." + ) + ) + # The following argument(s) should be provided to run the example. + parser.add_argument( + "-c", + "--customer_id", + type=str, + required=True, + help="The Google Ads customer ID.", + ) + + args = parser.parse_args() + + try: + main( + googleads_client, args.customer_id, + ) + except GoogleAdsException as ex: + print( + f'Request with ID "{ex.request_id}" failed with status ' + f'"{ex.error.code().name}" and includes the following errors:' + ) + for error in ex.failure.errors: + print(f'Error with message "{error.message}".') + if error.location: + for field_path_element in error.location.field_path_elements: + print(f"\t\tOn field: {field_path_element.field_name}") + sys.exit(1) diff --git a/examples/remarketing/add_dynamic_remarketing_asset.py b/examples/remarketing/add_dynamic_remarketing_asset.py index 101655155..987aa8dde 100755 --- a/examples/remarketing/add_dynamic_remarketing_asset.py +++ b/examples/remarketing/add_dynamic_remarketing_asset.py @@ -59,7 +59,10 @@ def _create_asset(client, customer_id): """ # Creates an operation to add the asset. operation = client.get_type("AssetOperation") - education_asset = operation.create.dynamic_education_asset + asset = operation.create + # The final_urls list must not be empty + asset.final_urls.append("https://www.example.com") + education_asset = asset.dynamic_education_asset # Defines meta-information about the school and program. education_asset.school_name = "The University of Unknown" education_asset.address = "Building 1, New York, 12345, USA" diff --git a/examples/remarketing/upload_offline_conversion.py b/examples/remarketing/upload_offline_conversion.py index 255e0df8c..aedab9836 100644 --- a/examples/remarketing/upload_offline_conversion.py +++ b/examples/remarketing/upload_offline_conversion.py @@ -37,6 +37,8 @@ def main( conversion_value, conversion_custom_variable_id, conversion_custom_variable_value, + gbraid, + wbraid, ): """Creates a click conversion with a default currency of USD. @@ -44,7 +46,8 @@ def main( client: An initialized GoogleAdsClient instance. customer_id: The client customer ID string. 
conversion_action_id: The ID of the conversion action to upload to. - gclid: The Google Click Identifier ID. + gclid: The Google Click Identifier ID. If set, the wbraid and gbraid + parameters must be None. conversion_date_time: The the date and time of the conversion (should be after the click time). The format is 'yyyy-mm-dd hh:mm:ss+|-hh:mm', e.g. '2021-01-01 12:32:45-08:00'. @@ -53,15 +56,25 @@ def main( variable to associate with the upload. conversion_custom_variable_value: The str value of the conversion custom variable to associate with the upload. + gbraid: The GBRAID for the iOS app conversion. If set, the gclid and + wbraid parameters must be None. + wbraid: The WBRAID for the iOS app conversion. If set, the gclid and + gbraid parameters must be None. """ click_conversion = client.get_type("ClickConversion") conversion_action_service = client.get_service("ConversionActionService") - click_conversion.conversion_action = ( - conversion_action_service.conversion_action_path( - customer_id, conversion_action_id - ) + click_conversion.conversion_action = conversion_action_service.conversion_action_path( + customer_id, conversion_action_id ) - click_conversion.gclid = gclid + + # Sets the single specified ID field. + if gclid: + click_conversion.gclid = gclid + elif gbraid: + click_conversion.gbraid = gbraid + else: + click_conversion.wbraid = wbraid + click_conversion.conversion_value = float(conversion_value) click_conversion.conversion_date_time = conversion_date_time click_conversion.currency_code = "USD" @@ -79,10 +92,8 @@ def main( request.customer_id = customer_id request.conversions = [click_conversion] request.partial_failure = True - conversion_upload_response = ( - conversion_upload_service.upload_click_conversions( - request=request, - ) + conversion_upload_response = conversion_upload_service.upload_click_conversions( + request=request, ) uploaded_click_conversion = conversion_upload_response.results[0] print( @@ -117,16 +128,6 @@ def main( required=True, help="The conversion action ID.", ) - parser.add_argument( - "-g", - "--gclid", - type=str, - required=True, - help="The Google Click Identifier ID " - "(gclid) which should be newer than the number of " - "days set on the conversion window of the conversion " - "action.", - ) parser.add_argument( "-t", "--conversion_date_time", @@ -156,6 +157,36 @@ def main( type=str, help="The value of the conversion custom variable to associate with the upload.", ) + group = parser.add_mutually_exclusive_group(required=True) + group.add_argument( + "-g", + "--gclid", + type=str, + help="The Google Click Identifier (gclid) which should be newer than " + "the number of days set on the conversion window of the conversion " + "action. Only one of either a gclid, WBRAID, or GBRAID identifier can " + "be passed into this example. See the following for more details: " + "https://developers.google.com/google-ads/api/docs/conversions/upload-clicks", + ) + group.add_argument( + "-b", + "--gbraid", + type=str, + help="The GBRAID identifier for an iOS app conversion. Only one of " + "either a gclid, WBRAID, or GBRAID identifier can be passed into this " + "example. See the following for more details: " + "https://developers.google.com/google-ads/api/docs/conversions/upload-clicks", + ) + group.add_argument( + "-d", + "--wbraid", + type=str, + help="The WBRAID identifier for an iOS app conversion. Only one of " + "either a gclid, WBRAID, or GBRAID identifier can be passed into this " + "example. 
See the following for more details: " + "https://developers.google.com/google-ads/api/docs/conversions/upload-clicks", + ) + args = parser.parse_args() try: @@ -168,6 +199,8 @@ def main( args.conversion_value, args.conversion_custom_variable_id, args.conversion_custom_variable_value, + args.gbraid, + args.wbraid, ) except GoogleAdsException as ex: print( diff --git a/examples/shopping_ads/add_performance_max_product_listing_group_tree.py b/examples/shopping_ads/add_performance_max_product_listing_group_tree.py new file mode 100644 index 000000000..a9e0fbcee --- /dev/null +++ b/examples/shopping_ads/add_performance_max_product_listing_group_tree.py @@ -0,0 +1,559 @@ +#!/usr/bin/env python +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Shows how to add product partitions to a Performance Max retail campaign. + +For Performance Max campaigns, product partitions are represented using the +AssetGroupListingGroupFilter resource. This resource can be combined with +itself to form a hierarchy that creates a product partition tree. + +For more information about Performance Max retail campaigns, see the +shopping_ads/add_performance_max_retail_campaign.py example. +""" + + +import argparse +import sys + +from google.ads.googleads.client import GoogleAdsClient +from google.ads.googleads.errors import GoogleAdsException + + +# We specify temporary IDs that are specific to a single mutate request. +# Temporary IDs are always negative and unique within one mutate request. +# +# See https://developers.google.com/google-ads/api/docs/mutating/best-practices +# for further details. +# +# These temporary IDs are fixed because they are used in multiple places. +_TEMPORARY_ID_LISTING_GROUP_ROOT = -1 + +# Sets the page size for paged search queries. +_PAGE_SIZE = 10000 + + +class AssetGroupListingGroupFilterRemoveOperationFactory: + def __init__(self, client, listing_group_filters): + """Factory class for creating sorted list of MutateOperations. + + The operations remove the given tree of AssetGroupListingGroupFilters, + When removing these listing group filters, the remove operations must be + sent in a specific order that removes leaf nodes before removing parent + nodes. + + Args: + client: an initialized GoogleAdsClient instance. + listing_group_filters: a list of AssetGroupListingGroupFilters. + """ + if len(listing_group_filters) < 1: + raise ValueError("No listing group filters to remove.") + + self.client = client + self.root_resource_name = None + self.parents_to_children = {} + + # Process the given list of listing group filters to identify the root + # node and any parent to child edges in the tree. + for listing_group_filter in listing_group_filters: + resource_name = listing_group_filter.resource_name + parent_resource_name = ( + listing_group_filter.parent_listing_group_filter + ) + + # When the node has no parent, it means it's the root node. 
+ if not parent_resource_name: + if self.root_resource_name: + # Check if another root node has already been detected and + # raise an error if so, as only one root node can exist for + # a given tree. + raise ValueError("More than one listing group parent node.") + else: + self.root_resource_name = resource_name + else: + # Check if we've already visited a sibling in this group, and + # either update it or create a new branch accordingly. + if parent_resource_name in self.parents_to_children: + # If we've visitied a sibling already, add this resource + # name to the existing list. + self.parents_to_children[parent_resource_name].append( + resource_name + ) + else: + # If we haven't visited any siblings, then create a new list + # for this parent node and add this resource name to it. + self.parents_to_children[parent_resource_name] = [ + resource_name + ] + + # [START add_performance_max_product_listing_group_tree_2] + def remove_all(self): + """Creates a list of MutateOperations for the listing group filter tree. + + Returns: + A list of MutateOperations that remove each specified + AssetGroupListingGroupFilter in the tree passed in when this + class was initialized. + """ + return self.remove_descendants_and_filter(self.root_resource_name) + # [END add_performance_max_product_listing_group_tree_2] + + # [START add_performance_max_product_listing_group_tree_3] + def remove_descendants_and_filter(self, resource_name): + """Builds a post-order sorted list of MutateOperations. + + Creates a list of MutateOperations that remove all the descendents of + the specified AssetGroupListingGroupFilter resource name. The order of + removal is post-order, where all the children (and their children, + recursively) are removed first. Then, the root node itself is removed. + + Args: + resource_name: an AssetGroupListingGroupFilter resource name. + + Returns: + a sorted list of MutateOperations. + """ + operations = [] + + # Check if resource name is a parent. + if resource_name in self.parents_to_children: + # If this resource name is a parent, call this method recursively + # on each of its children. + for child in self.parents_to_children[resource_name]: + operations.extend(self.remove_descendants_and_filter(child)) + + mutate_operation = self.client.get_type("MutateOperation") + mutate_operation.asset_group_listing_group_filter_operation.remove = ( + resource_name + ) + operations.append(mutate_operation) + + return operations + # [END add_performance_max_product_listing_group_tree_3] + + +class AssetGroupListingGroupFilterCreateOperationFactory: + def __init__(self, client, customer_id, asset_group_id, root_listing_id): + """A factory class for creating MutateOperations. + + These operations create new AssetGroupListingGroupFilterMutateOperation + instances using the given customer ID and asset group ID. + + Args: + client: an initialized GoogleAdsClient instance. + customer_id: a client customer ID. + asset_group_id: the asset group id for the Performance Max campaign. + root_listing_id: a temporary ID to use as the listing group root. + """ + self.client = client + self.customer_id = customer_id + self.asset_group_id = asset_group_id + self.root_listing_id = root_listing_id + self.next_temp_id = self.root_listing_id - 1 + + def next_id(self): + """Returns the next temporary ID for use in a sequence. + + The temporary IDs are used in the list of MutateOperations in order to + refer to objects in the request that aren't in the API yet. 
For more + details see: + https://developers.google.com/google-ads/api/docs/mutating/best-practices#temporary_resource_names + + Returns: + A new temporary ID. + """ + self.next_temp_id -= 1 + return self.next_temp_id + + # [START add_performance_max_product_listing_group_tree_4] + def create_root(self): + """Creates a MutateOperation to add a root AssetGroupListingGroupFilter. + + Returns: + A MutateOperation for a new AssetGroupListingGroupFilter. + """ + googleads_service = self.client.get_service("GoogleAdsService") + + mutate_operation = self.client.get_type("MutateOperation") + asset_group_listing_group_filter = ( + mutate_operation.asset_group_listing_group_filter_operation.create + ) + + asset_group_listing_group_filter.resource_name = googleads_service.asset_group_listing_group_filter_path( + self.customer_id, self.asset_group_id, self.root_listing_id + ) + asset_group_listing_group_filter.asset_group = googleads_service.asset_group_path( + self.customer_id, self.asset_group_id + ) + # Since this is the root node, do not set the + # parent_listing_group_filter field. For all other nodes, this would + # refer to the parent listing group filter resource name. + # asset_group_listing_group_filter.parent_listing_group_filter = "" + + # Unlike the add_performance_max_retail_campaign example, the type for + # the root node here must be Subsivision because we add child + # partitions under it. + asset_group_listing_group_filter.type_ = ( + self.client.enums.ListingGroupFilterTypeEnum.SUBDIVISION + ) + + # Because this is a Performance Max campaign for retail, we need to + # specify that this is a shopping vertical. + asset_group_listing_group_filter.vertical = ( + self.client.enums.ListingGroupFilterVerticalEnum.SHOPPING + ) + + return mutate_operation + # [END add_performance_max_product_listing_group_tree_4] + + # [START add_performance_max_product_listing_group_tree_5] + def create_subdivision(self, parent_id, temporary_id, dimension): + """Creates a MutateOperation to add an AssetGroupListingGroupFilter. + + Use this method if the filter will have child filters. Otherwise use + the create_unit method. + + Args: + parent_id: the ID of the parent AssetGroupListingGroupFilter. + temporary_id: a temporary ID for the operation being created. + dimension: The dimension to associate with this new + AssetGroupListingGroupFilter. 
+ + Returns: + a MutateOperation for a new AssetGroupListingGroupFilter + """ + googleads_service = self.client.get_service("GoogleAdsService") + + mutate_operation = self.client.get_type("MutateOperation") + asset_group_listing_group_filter = ( + mutate_operation.asset_group_listing_group_filter_operation.create + ) + + asset_group_listing_group_filter.resource_name = googleads_service.asset_group_listing_group_filter_path( + self.customer_id, self.asset_group_id, temporary_id + ) + asset_group_listing_group_filter.asset_group = googleads_service.asset_group_path( + self.customer_id, self.asset_group_id + ) + asset_group_listing_group_filter.parent_listing_group_filter = googleads_service.asset_group_listing_group_filter_path( + self.customer_id, self.asset_group_id, parent_id + ) + asset_group_listing_group_filter.type_ = ( + self.client.enums.ListingGroupFilterTypeEnum.SUBDIVISION + ) + asset_group_listing_group_filter.vertical = ( + self.client.enums.ListingGroupFilterVerticalEnum.SHOPPING + ) + asset_group_listing_group_filter.case_value = dimension + + return mutate_operation + # [END add_performance_max_product_listing_group_tree_5] + + # [START add_performance_max_product_listing_group_tree_6] + def create_unit(self, parent_id, temporary_id, dimension): + """Creates a MutateOperation to add an AssetGroupListingGroupFilter. + + Use this method if the filter will not have child filters. Otherwise use + the create_subdivision method. + + Args: + parent_id: the ID of the parent AssetGroupListingGroupFilter. + dimension: The dimension to associate with this new + AssetGroupListingGroupFilter. + + Returns: + a MutateOperation for a new AssetGroupListingGroupFilter + """ + googleads_service = self.client.get_service("GoogleAdsService") + + mutate_operation = self.client.get_type("MutateOperation") + asset_group_listing_group_filter = ( + mutate_operation.asset_group_listing_group_filter_operation.create + ) + + asset_group_listing_group_filter.resource_name = googleads_service.asset_group_listing_group_filter_path( + self.customer_id, self.asset_group_id, temporary_id + ) + asset_group_listing_group_filter.asset_group = googleads_service.asset_group_path( + self.customer_id, self.asset_group_id + ) + asset_group_listing_group_filter.parent_listing_group_filter = googleads_service.asset_group_listing_group_filter_path( + self.customer_id, self.asset_group_id, parent_id + ) + # We must use the UnitIncluded type to indicate that the + # AssetGroupListingGroupFilter won't have children. + asset_group_listing_group_filter.type_ = ( + self.client.enums.ListingGroupFilterTypeEnum.UNIT_INCLUDED + ) + # Because this is a Performance Max campaign for retail, we need to + # specify that this is in the shopping vertical. + asset_group_listing_group_filter.vertical = ( + self.client.enums.ListingGroupFilterVerticalEnum.SHOPPING + ) + asset_group_listing_group_filter.case_value = dimension + + return mutate_operation + # [END add_performance_max_product_listing_group_tree_6] + + +# [START add_performance_max_product_listing_group_tree] +def main(client, customer_id, asset_group_id, replace_existing_tree): + """The main method that creates all necessary entities for the example. + + Args: + client: an initialized GoogleAdsClient instance. + customer_id: a client customer ID. + asset_group_id: the asset group id for the Performance Max campaign. + replace_existing_tree: option to remove existing product tree from the + passed in asset group. 
+ """ + googleads_service = client.get_service("GoogleAdsService") + asset_group_resource_name = googleads_service.asset_group_path( + customer_id, asset_group_id + ) + operations = [] + + if replace_existing_tree: + # Retrieve a list of existing AssetGroupListingGroupFilters + existing_listing_group_filters = _get_all_existing_listing_group_filter_assets_in_asset_group( + client, customer_id, asset_group_resource_name + ) + + # If present, create MutateOperations to remove each + # AssetGroupListingGroupFilter and add them to the list of operations. + if existing_listing_group_filters: + remove_operation_factory = AssetGroupListingGroupFilterRemoveOperationFactory( + client, existing_listing_group_filters + ) + operations.extend(remove_operation_factory.remove_all()) + + create_operation_factory = AssetGroupListingGroupFilterCreateOperationFactory( + client, customer_id, asset_group_id, _TEMPORARY_ID_LISTING_GROUP_ROOT + ) + + operations.append(create_operation_factory.create_root()) + + new_dimension = client.get_type("ListingGroupFilterDimension") + new_dimension.product_condition.condition = ( + client.enums.ListingGroupFilterProductConditionEnum.NEW + ) + operations.append( + create_operation_factory.create_unit( + _TEMPORARY_ID_LISTING_GROUP_ROOT, + create_operation_factory.next_id(), + new_dimension, + ) + ) + + used_dimension = client.get_type("ListingGroupFilterDimension") + used_dimension.product_condition.condition = ( + client.enums.ListingGroupFilterProductConditionEnum.USED + ) + operations.append( + create_operation_factory.create_unit( + _TEMPORARY_ID_LISTING_GROUP_ROOT, + create_operation_factory.next_id(), + used_dimension, + ) + ) + + # We save this ID because create child nodes underneath it. + subdivision_id_condition_other = create_operation_factory.next_id() + + # All sibling nodes must have the same dimension type. We use an empty + # product_condition to indicate that this is an "Other" partition. + other_dimension = client.get_type("ListingGroupFilterDimension") + # This triggers the presence of the product_condition field without + # specifying any field values. This is important in order to tell the API + # that this is an "other" node. + other_dimension.product_condition._pb.SetInParent() + # We're calling create_subdivision because this listing group will have + # children. + operations.append( + create_operation_factory.create_subdivision( + _TEMPORARY_ID_LISTING_GROUP_ROOT, + subdivision_id_condition_other, + other_dimension, + ) + ) + + cool_dimension = client.get_type("ListingGroupFilterDimension") + cool_dimension.product_brand.value = "CoolBrand" + operations.append( + create_operation_factory.create_unit( + subdivision_id_condition_other, + create_operation_factory.next_id(), + cool_dimension, + ) + ) + + cheap_dimension = client.get_type("ListingGroupFilterDimension") + cheap_dimension.product_brand.value = "CheapBrand" + operations.append( + create_operation_factory.create_unit( + subdivision_id_condition_other, + create_operation_factory.next_id(), + cheap_dimension, + ) + ) + + empty_dimension = client.get_type("ListingGroupFilterDimension") + # This triggers the presence of the product_brand field without specifying + # any field values. This is important in order to tell the API + # that this is an "other" node. 
+ empty_dimension.product_brand._pb.SetInParent() + operations.append( + create_operation_factory.create_unit( + subdivision_id_condition_other, + create_operation_factory.next_id(), + empty_dimension, + ) + ) + + response = googleads_service.mutate( + customer_id=customer_id, mutate_operations=operations + ) + + _print_response_details(operations, response) + # [END add_performance_max_product_listing_group_tree] + + +# [START add_performance_max_product_listing_group_tree_7] +def _get_all_existing_listing_group_filter_assets_in_asset_group( + client, customer_id, asset_group_resource_name +): + """Fetches all of the listing group filters in an asset group. + + Args: + client: an initialized GoogleAdsClient instance. + customer_id: a client customer ID. + asset_group_resource_name: the asset group resource name for the + Performance Max campaign. + + Returns: + a list of AssetGroupListingGroupFilters. + """ + query = f""" + SELECT + asset_group_listing_group_filter.resource_name, + asset_group_listing_group_filter.parent_listing_group_filter + FROM asset_group_listing_group_filter + WHERE asset_group_listing_group_filter.asset_group = '{asset_group_resource_name}'""" + + request = client.get_type("SearchGoogleAdsRequest") + request.customer_id = customer_id + request.query = query + request.page_size = _PAGE_SIZE + + googleads_service = client.get_service("GoogleAdsService") + response = googleads_service.search(request=request) + + return [result.asset_group_listing_group_filter for result in response] + # [END add_performance_max_product_listing_group_tree_7] + + +def _print_response_details(mutate_operations, response): + """Prints the details of the GoogleAdsService.Mutate request. + + This uses the original list of mutate operations to map the operation + result to what was sent. It can be assumed that the initial set of + operations and the list returned in the response are in the same order. + + Args: + mutate_operations: a list of MutateOperation instances. + response: a GoogleAdsMutateResponse instance. + """ + # Parse the Mutate response to print details about the entities that were + # created in the request. + for i, result in enumerate(response.mutate_operation_responses): + requested = mutate_operations[i] + resource_name = ( + result.asset_group_listing_group_filter_result.resource_name + ) + + # Check the operation type for the requested operation in order to + # log whether it was a remove or a create request. + if "remove" in requested.asset_group_listing_group_filter_operation: + print( + "Removed an AssetGroupListingGroupFilter with resource name: " + f"'{resource_name}'." + ) + elif "create" in requested.asset_group_listing_group_filter_operation: + print( + "Created an AssetGroupListingGroupFilter with resource name: " + f"'{resource_name}'." + ) + else: + print("An unknown operation was returned.") + + +if __name__ == "__main__": + # GoogleAdsClient will read the google-ads.yaml configuration file in the + # home directory if none is specified. + googleads_client = GoogleAdsClient.load_from_storage(version="v10") + + parser = argparse.ArgumentParser( + description=( + "Adds product partitions to a Performance Max retail campaign." + ) + ) + # The following argument(s) should be provided to run the example. 
+ parser.add_argument( + "-c", + "--customer_id", + type=str, + required=True, + help="The Google Ads customer ID.", + ) + parser.add_argument( + "-a", + "--asset_group_id", + type=int, + required=True, + help="The asset group id for the Performance Max campaign.", + ) + parser.add_argument( + "-r", + "--replace_existing_tree", + action="store_true", + help=( + "Whether or not to replace the existing product partition tree. " + "If the current AssetGroup already has a tree of " + "ListingGroupFilters, attempting to add a new set of " + "ListingGroupFilters including a root filter will result in an " + "ASSET_GROUP_LISTING_GROUP_FILTER_ERROR_MULTIPLE_ROOTS error. " + "Setting this option to true will remove the existing tree and " + "prevent this error." + ), + ) + + args = parser.parse_args() + + try: + main( + googleads_client, + args.customer_id, + args.asset_group_id, + args.replace_existing_tree, + ) + except GoogleAdsException as ex: + print( + f'Request with ID "{ex.request_id}" failed with status ' + f'"{ex.error.code().name}" and includes the following errors:' + ) + for error in ex.failure.errors: + print(f'Error with message "{error.message}".') + if error.location: + for field_path_element in error.location.field_path_elements: + print(f"\t\tOn field: {field_path_element.field_name}") + sys.exit(1) diff --git a/google/ads/googleads/__init__.py b/google/ads/googleads/__init__.py index 6413d40a7..962fc6f49 100644 --- a/google/ads/googleads/__init__.py +++ b/google/ads/googleads/__init__.py @@ -18,4 +18,4 @@ import google.ads.googleads.util -VERSION = "14.1.0" +VERSION = "15.1.0" diff --git a/google/ads/googleads/interceptors/__init__.py b/google/ads/googleads/interceptors/__init__.py index ba1837233..d3feb1555 100644 --- a/google/ads/googleads/interceptors/__init__.py +++ b/google/ads/googleads/interceptors/__init__.py @@ -1,4 +1,4 @@ -# Copyright 2019 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -12,6 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. +from .helpers import mask_message +from .interceptor import Interceptor from .metadata_interceptor import MetadataInterceptor from .exception_interceptor import ExceptionInterceptor from .logging_interceptor import LoggingInterceptor diff --git a/google/ads/googleads/interceptors/helpers.py b/google/ads/googleads/interceptors/helpers.py new file mode 100644 index 000000000..e1e0da0ae --- /dev/null +++ b/google/ads/googleads/interceptors/helpers.py @@ -0,0 +1,170 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from copy import deepcopy +from google.ads.googleads.util import ( + set_nested_message_field, + get_nested_attr, + convert_proto_plus_to_protobuf, + proto_copy_from, +) + +# The keys in this dict represent messages that have fields that may contain +# sensitive information, such as PII like an email address, that shouldn't +# be logged. The values are a list of dot-delimited field paths on the message +# where the sensitive information may exist. Messages listed here will have +# their sensitive fields redacted in logs when transmitted in the following +# scenarios: +# 1. They are returned as part of a Search or SearchStream request. +# 2. They are returned individually in a Get request. +# 3. They are sent to the API as part of a Mutate request. +_MESSAGES_WITH_SENSITIVE_FIELDS = { + "CustomerUserAccess": ["email_address", "inviter_user_email_address"], + "CustomerUserAccessInvitation": ["email_address"], + "MutateCustomerUserAccessRequest": [ + "operation.update.email_address", + "operation.update.inviter_user_email_address", + ], + "ChangeEvent": ["user_email"], + "CreateCustomerClientRequest": ["email_address"], + "Feed": ["places_location_feed_data.email_address"], +} + +# This is a list of the names of messages that return search results from the +# API. These messages contain other messages that may contain sensitive +# information that needs to be masked before being logged. +_SEARCH_RESPONSE_MESSAGE_NAMES = [ + "SearchGoogleAdsResponse", + "SearchGoogleAdsStreamResponse", +] + + +def _copy_message(message): + """Returns a copy of the given message. + + Args: + message: An object containing information from an API request + or response. + + Returns: + A copy of the given message. + """ + return deepcopy(message) + + +def _mask_message_fields(field_list, message, mask): + """Copies the given message and masks sensitive fields. + + Sensitive fields are given as a list of strings and are overridden + with the word "REDACTED" to protect PII from being logged. + + Args: + field_list: A list of strings specifying the fields on the message + that should be masked. + message: An object containing information from an API request + or response. + mask: A str that should replace the sensitive information in the + message. + + Returns: + A new instance of the message object with fields copied and masked + where necessary. + """ + copy = _copy_message(message) + + for field_path in field_list: + try: + # Only mask the field if it's been set on the message. + if get_nested_attr(copy, field_path): + set_nested_message_field(copy, field_path, mask) + except AttributeError: + # AttributeError is raised when the field is not defined on the + # message. In this case there's nothing to mask and the field + # should be skipped. + break + + return copy + + +def _mask_google_ads_search_response(message, mask): + """Copies and masks sensitive data in a Search response + + Response messages include instances of GoogleAdsSearchResponse and + GoogleAdsSearchStreamResponse. + + Args: + message: A SearchGoogleAdsResponse or SearchGoogleAdsStreamResponse + instance. + mask: A str that should replace the sensitive information in the + message. + + Returns: + A copy of the message with sensitive fields masked. + """ + copy = _copy_message(message) + + for row in copy.results: + # Each row is an instance of GoogleAdsRow. The ListFields method + # returns a list of (FieldDescriptor, value) tuples for all fields in + # the message which are not empty. 
If this message is a proto_plus proto + # then we need to access the native proto to call ListFields. If it's + # not proto_plus we can assume it's protobuf and can access ListFields + # directly. + if hasattr(row, "_pb"): + row_fields = convert_proto_plus_to_protobuf(row).ListFields() + else: + row_fields = row.ListFields() + for field in row_fields: + field_descriptor = field[0] + # field_name is the name of the field on the GoogleAdsRow instance, + # for example "campaign" or "customer_user_access" + field_name = field_descriptor.name + # message_name is the name of the message, similar to the class + # name, for example "Campaign" or "CustomerUserAccess" + message_name = field_descriptor.message_type.name + if message_name in _MESSAGES_WITH_SENSITIVE_FIELDS.keys(): + nested_message = getattr(row, field_name) + masked_message = _mask_message_fields( + _MESSAGES_WITH_SENSITIVE_FIELDS[message_name], + nested_message, + mask, + ) + # Overwrites the nested message with an exact copy of itself, + # where sensitive fields have been masked. + proto_copy_from(getattr(row, field_name), masked_message) + + return copy + + +def mask_message(message, mask): + """Copies and returns a message with sensitive fields masked. + + Args: + message: An object containing information from an API request + or response. + mask: A str that should replace the sensitive information in the + message. + + Returns: + A copy of the message instance with sensitive fields masked. + """ + class_name = message.__class__.__name__ + + if class_name in _SEARCH_RESPONSE_MESSAGE_NAMES: + return _mask_google_ads_search_response(message, mask) + elif class_name in _MESSAGES_WITH_SENSITIVE_FIELDS.keys(): + sensitive_fields = _MESSAGES_WITH_SENSITIVE_FIELDS[class_name] + return _mask_message_fields(sensitive_fields, message, mask) + else: + return message diff --git a/google/ads/googleads/interceptors/logging_interceptor.py b/google/ads/googleads/interceptors/logging_interceptor.py index d57cfaeec..e74d1b11e 100644 --- a/google/ads/googleads/interceptors/logging_interceptor.py +++ b/google/ads/googleads/interceptors/logging_interceptor.py @@ -1,4 +1,4 @@ -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -20,47 +20,14 @@ """ from copy import deepcopy +from types import SimpleNamespace import json import logging from grpc import UnaryUnaryClientInterceptor, UnaryStreamClientInterceptor -from .interceptor import Interceptor -from ..util import ( - set_nested_message_field, - get_nested_attr, - convert_proto_plus_to_protobuf, - proto_copy_from, -) - -# The keys in this dict represent messages that have fields that may contain -# sensitive information, such as PII like an email address, that shouldn't -# be logged. The values are a list of dot-delimited field paths on the message -# where the sensitive information may exist. Messages listed here will have -# their sensitive fields redacted in logs when transmitted in the following -# scenarios: -# 1. They are returned as part of a Search or SearchStream request. -# 2. They are returned individually in a Get request. -# 3. They are sent to the API as part of a Mutate request. 
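The masking scenarios listed in the comment removed here are now handled by helpers.mask_message, which dispatches on the message's class name and always operates on a copy. A small sketch of that behavior, assuming the v10 generated proto-plus types that the tests later in this patch exercise:

from google.ads.googleads.interceptors import mask_message
from google.ads.googleads.v10.resources.types import customer_user_access

access = customer_user_access.CustomerUserAccess(
    email_address="user@example.com",
    inviter_user_email_address="inviter@example.com",
)

masked = mask_message(access, "REDACTED")

# Sensitive fields are overwritten on the returned copy only; the original
# message is untouched. Messages with no entry in
# _MESSAGES_WITH_SENSITIVE_FIELDS are returned unchanged.
print(masked.email_address)  # "REDACTED"
print(access.email_address)  # "user@example.com"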
-_MESSAGES_WITH_SENSITIVE_FIELDS = { - "CustomerUserAccess": ["email_address", "inviter_user_email_address"], - "CustomerUserAccessInvitation": ["email_address"], - "MutateCustomerUserAccessRequest": [ - "operation.update.email_address", - "operation.update.inviter_user_email_address", - ], - "ChangeEvent": ["user_email"], - "CreateCustomerClientRequest": ["email_address"], - "Feed": ["places_location_feed_data.email_address"], -} - -# This is a list of the names of messages that return search results from the -# API. These messages contain other messages that may contain sensitive -# information that needs to be masked before being logged. -_SEARCH_RESPONSE_MESSAGE_NAMES = [ - "SearchGoogleAdsResponse", - "SearchGoogleAdsStreamResponse", -] +from google.ads.googleads.interceptors import Interceptor, mask_message +from types import SimpleNamespace class LoggingInterceptor( @@ -95,6 +62,7 @@ def __init__(self, logger, api_version, endpoint=None): super().__init__(api_version) self.endpoint = endpoint self.logger = logger + self._cache = None def _get_trailing_metadata(self, response): """Retrieves trailing metadata from a response object. @@ -183,9 +151,10 @@ def _parse_exception_to_str(self, exception): """ try: # If the exception is from the Google Ads API then the failure - # attribute will be an instance of GoogleAdsFailure and can be - # concatenated into a log string. - return exception.failure + # attribute will be an instance of GoogleAdsFailure. We convert it + # to str here so the return type is consistent, rather than casting + # by concatenating in the logs. + return str(exception.failure) except AttributeError: try: # if exception.failure isn't present then it's likely this is a @@ -219,7 +188,7 @@ def _get_fault_message(self, exception): except AttributeError: return None - def _log_successful_request( + def log_successful_request( self, method, customer_id, @@ -240,26 +209,29 @@ def _log_successful_request( trailing_metadata_json: A JSON str of trailing_metadata. response: A grpc.Call/grpc.Future instance. """ - result = _mask_message(response.result(), self._SENSITIVE_INFO_MASK) - - self.logger.debug( - self._FULL_REQUEST_LOG_LINE.format( - method, - self.endpoint, - metadata_json, - request, - trailing_metadata_json, - result, + result = self.retrieve_and_mask_result(response) + + # Check log level here to avoid calling .format() unless necessary. 
+ if self.logger.isEnabledFor(logging.DEBUG): + self.logger.debug( + self._FULL_REQUEST_LOG_LINE.format( + method, + self.endpoint, + metadata_json, + request, + trailing_metadata_json, + result, + ) ) - ) - self.logger.info( - self._SUMMARY_LOG_LINE.format( - customer_id, self.endpoint, method, request_id, False, None + if self.logger.isEnabledFor(logging.INFO): + self.logger.info( + self._SUMMARY_LOG_LINE.format( + customer_id, self.endpoint, method, request_id, False, None + ) ) - ) - def _log_failed_request( + def log_failed_request( self, method, customer_id, @@ -284,16 +256,17 @@ def _log_failed_request( exception_str = self._parse_exception_to_str(exception) fault_message = self._get_fault_message(exception) - self.logger.info( - self._FULL_FAULT_LOG_LINE.format( - method, - self.endpoint, - metadata_json, - request, - trailing_metadata_json, - exception_str, + if self.logger.isEnabledFor(logging.INFO): + self.logger.info( + self._FULL_FAULT_LOG_LINE.format( + method, + self.endpoint, + metadata_json, + request, + trailing_metadata_json, + exception_str, + ) ) - ) self.logger.warning( self._SUMMARY_LOG_LINE.format( @@ -306,7 +279,7 @@ def _log_failed_request( ) ) - def _log_request(self, client_call_details, request, response): + def log_request(self, client_call_details, request, response): """Handles logging all requests. Args: @@ -321,10 +294,10 @@ def _log_request(self, client_call_details, request, response): trailing_metadata = self._get_trailing_metadata(response) request_id = self.get_request_id_from_metadata(trailing_metadata) trailing_metadata_json = self.parse_metadata_to_json(trailing_metadata) - request = _mask_message(request, self._SENSITIVE_INFO_MASK) + request = mask_message(request, self._SENSITIVE_INFO_MASK) if response.exception(): - self._log_failed_request( + self.log_failed_request( method, customer_id, initial_metadata_json, @@ -334,7 +307,7 @@ def _log_request(self, client_call_details, request, response): response, ) else: - self._log_successful_request( + self.log_successful_request( method, customer_id, initial_metadata_json, @@ -362,7 +335,7 @@ def intercept_unary_unary(self, continuation, client_call_details, request): response = continuation(client_call_details, request) if self.logger.isEnabledFor(logging.WARNING): - self._log_request(client_call_details, request, response) + self.log_request(client_call_details, request, response) return response @@ -386,130 +359,33 @@ def intercept_unary_stream( def on_rpc_complete(response_future): if self.logger.isEnabledFor(logging.WARNING): - self._log_request(client_call_details, request, response_future) + self.log_request(client_call_details, request, response_future) response = continuation(client_call_details, request) response.add_done_callback(on_rpc_complete) - + self._cache = response.get_cache() return response + def retrieve_and_mask_result(self, response): + """If the cache is populated, mask the cached stream response object. + Otherwise, mask the non-streaming response result. -def _copy_message(message): - """Returns a copy of the given message. - - Args: - message: An object containing information from an API request - or response. - - Returns: - A copy of the given message. - """ - return deepcopy(message) - - -def _mask_message_fields(field_list, message, mask): - """Copies the given message and masks sensitive fields. - - Sensitive fields are given as a list of strings and are overridden - with the word "REDACTED" to protect PII from being logged. 
- - Args: - field_list: A list of strings specifying the fields on the message - that should be masked. - message: An object containing information from an API request - or response. - mask: A str that should replace the sensitive information in the - message. - - Returns: - A new instance of the message object with fields copied and masked - where necessary. - """ - copy = _copy_message(message) + Args: + response: A grpc.Call/grpc.Future instance. - for field_path in field_list: - try: - # Only mask the field if it's been set on the message. - if get_nested_attr(copy, field_path): - set_nested_message_field(copy, field_path, mask) - except AttributeError: - # AttributeError is raised when the field is not defined on the - # message. In this case there's nothing to mask and the field - # should be skipped. - break - - return copy - - -def _mask_google_ads_search_response(message, mask): - """Copies and masks sensitive data in a Search response - - Response messages include instances of GoogleAdsSearchResponse and - GoogleAdsSearchStreamResponse. - - Args: - message: A SearchGoogleAdsResponse or SearchGoogleAdsStreamResponse - instance. - mask: A str that should replace the sensitive information in the - message. - - Returns: - A copy of the message with sensitive fields masked. - """ - copy = _copy_message(message) - - for row in copy.results: - # Each row is an instance of GoogleAdsRow. The ListFields method - # returns a list of (FieldDescriptor, value) tuples for all fields in - # the message which are not empty. If this message is a proto_plus proto - # then we need to access the natve proto to call ListFields. If it's - # not proto_plus we can assume it's protobuf and can access ListFields - # directly. - if hasattr(row, "_pb"): - row_fields = convert_proto_plus_to_protobuf(row).ListFields() + Returns: + A masked response message. + """ + if self._cache and self._cache.initial_response_object is not None: + # Get the first streaming response object from the cache. + result = self._cache.initial_response_object else: - row_fields = row.ListFields() - for field in row_fields: - field_descriptor = field[0] - # field_name is the name of the field on the GoogleAdsRow instance, - # for example "campaign" or "customer_user_access" - field_name = field_descriptor.name - # message_name is the name of the message, similar to the class - # name, for example "Campaign" or "CustomerUserAccess" - message_name = field_descriptor.message_type.name - if message_name in _MESSAGES_WITH_SENSITIVE_FIELDS.keys(): - nested_message = getattr(row, field_name) - masked_message = _mask_message_fields( - _MESSAGES_WITH_SENSITIVE_FIELDS[message_name], - nested_message, - mask, - ) - # Overwrites the nested message with an exact copy of itself, - # where sensitive fields have been masked. - proto_copy_from(getattr(row, field_name), masked_message) - - return copy - - -def _mask_message(message, mask): - """Copies and returns a message with sensitive fields masked. - - Args: - message: An object containing information from an API request - or response. - mask: A str that should replace the sensitive information in the - message. - - Returns: - A copy of the message instance with sensitive fields masked. 
- """ - class_name = message.__class__.__name__ - - if class_name in _SEARCH_RESPONSE_MESSAGE_NAMES: - return _mask_google_ads_search_response(message, mask) - elif class_name in _MESSAGES_WITH_SENSITIVE_FIELDS.keys(): - sensitive_fields = _MESSAGES_WITH_SENSITIVE_FIELDS[class_name] - return _mask_message_fields(sensitive_fields, message, mask) - else: - return message + result = response.result() + # Mask the response object if debug level logging is enabled. + if type(self.logger) == logging.Logger and self.logger.isEnabledFor( + logging.DEBUG + ): + result = mask_message(result, self._SENSITIVE_INFO_MASK) + + return result diff --git a/google/ads/googleads/interceptors/response_wrappers.py b/google/ads/googleads/interceptors/response_wrappers.py index e7909d523..9b6b6998b 100644 --- a/google/ads/googleads/interceptors/response_wrappers.py +++ b/google/ads/googleads/interceptors/response_wrappers.py @@ -1,4 +1,4 @@ -# Copyright 2021 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -14,8 +14,10 @@ """Wrapper classes used to modify the behavior of response objects.""" import grpc +import time from google.ads.googleads import util +from types import SimpleNamespace class _UnaryStreamWrapper(grpc.Call, grpc.Future): @@ -25,6 +27,7 @@ def __init__(self, underlay_call, failure_handler, use_proto_plus=False): self._failure_handler = failure_handler self._exception = None self._use_proto_plus = use_proto_plus + self._cache = SimpleNamespace(**{"initial_response_object": None}) def initial_metadata(self): return self._underlay_call.initial_metadata() @@ -83,6 +86,13 @@ def __iter__(self): def __next__(self): try: message = next(self._underlay_call) + # Store only the first streaming response object in _cache.initial_response_object. + # Each streaming response object captures 10,000 rows. + # The default log character limit is 5,000, so caching multiple + # streaming response objects does not make sense in most cases, + # as only [part of] 1 will get logged. + if self._cache.initial_response_object == None: + self._cache.initial_response_object = message if self._use_proto_plus == True: # By default this message is wrapped by proto-plus return message @@ -97,6 +107,9 @@ def __next__(self): self._exception = e raise e + def get_cache(self): + return self._cache + class _UnaryUnaryWrapper(grpc.Call, grpc.Future): def __init__(self, underlay_call, use_proto_plus=False): diff --git a/setup.py b/setup.py index 222978196..8ce2d80ed 100644 --- a/setup.py +++ b/setup.py @@ -18,13 +18,20 @@ install_requires = [ "google-auth-oauthlib >= 0.3.0, < 1.0.0", + # NOTE: Require google-api-core > 1.31.5 until + # https://github.com/googleapis/google-cloud-python/issues/10566 + # is resolved. Once resolved, require google-api-core >=2.3.2. + "google-api-core >= 1.31.5, < 3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.0", "google-api-core >= 2.0.1, < 3.0.0", "googleapis-common-protos >= 1.5.8, < 2.0.0", + # NOTE: Source code for grpcio and grpcio-status exist in the same + # grpc/grpc monorepo and thus these two dependencies should always + # have the same version range. 
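The cache added to _UnaryStreamWrapper above is what retrieve_and_mask_result reads back in the interceptor, so streamed responses now appear in DEBUG logs alongside unary responses. A usage sketch, assuming a configured google-ads.yaml, the library's standard client logger name, and a placeholder customer ID:

import logging

from google.ads.googleads.client import GoogleAdsClient

# With DEBUG enabled on the client logger, the first chunk of the streamed
# response (cached by _UnaryStreamWrapper) is logged once the RPC completes,
# with sensitive fields masked. "%(message).5000s" truncates each logged
# message to the 5,000 character default mentioned in the caching comment
# above.
logging.basicConfig(
    level=logging.INFO, format="[%(asctime)s - %(levelname)s] %(message).5000s"
)
logging.getLogger("google.ads.googleads.client").setLevel(logging.DEBUG)

client = GoogleAdsClient.load_from_storage(version="v10")
googleads_service = client.get_service("GoogleAdsService")

query = "SELECT campaign.id, campaign.name FROM campaign"
stream = googleads_service.search_stream(
    customer_id="1234567890", query=query  # placeholder customer ID
)

for batch in stream:
    for row in batch.results:
        print(row.campaign.id, row.campaign.name)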
"grpcio >= 1.38.1, < 2.0.0", - "proto-plus == 1.18.1", + "grpcio-status >= 1.38.1, < 2.0.0", + "proto-plus == 1.19.6", "PyYAML >= 5.1, < 6.0", "setuptools >= 40.3.0", - "nox >= 2020.12.31, < 2022.6", "protobuf >= 3.12.0, < 3.18.0", ] @@ -34,7 +41,7 @@ setup( name="google-ads", - version="15.0.0", + version="15.1.0", author="Google LLC", author_email="googleapis-packages@google.com", classifiers=[ @@ -51,6 +58,11 @@ python_requires=">=3.7", long_description=long_description, install_requires=install_requires, + extras_require={ + "tests": [ + "nox >= 2020.12.31, < 2022.6", + ] + }, license="Apache 2.0", packages=find_packages( exclude=["examples", "examples.*", "tests", "tests.*"] diff --git a/tests/interceptors/logging_interceptor_test.py b/tests/interceptors/logging_interceptor_test.py index 9f83e992b..41645a975 100644 --- a/tests/interceptors/logging_interceptor_test.py +++ b/tests/interceptors/logging_interceptor_test.py @@ -1,4 +1,4 @@ -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -18,11 +18,13 @@ import json import logging from unittest import TestCase +from types import SimpleNamespace import mock from google.ads.googleads import client as Client from google.ads.googleads.interceptors import LoggingInterceptor +from google.ads.googleads.interceptors.helpers import mask_message, _mask_message_fields, _copy_message import google.ads.googleads.interceptors.logging_interceptor as interceptor_module from google.ads.googleads import util @@ -61,6 +63,7 @@ class LoggingInterceptorTest(TestCase): _MOCK_CUSTOMER_ID = "123456" _MOCK_REQUEST_ID = "654321xyz" _MOCK_METHOD = "test/method" + _MOCK_STREAM = "Testing 123" _MOCK_TRAILING_METADATA = (("request-id", _MOCK_REQUEST_ID),) _MOCK_TRANSPORT_ERROR_METADATA = tuple() _MOCK_ERROR_MESSAGE = "Test error message" @@ -80,8 +83,8 @@ def _create_test_interceptor( for testing. Args: - config: A dict configuration - endpoint: A str representing an endpoint + version: A str representing the API version of the request. + endpoint: A str representing an endpoint. Returns: A LoggingInterceptor instance. @@ -176,12 +179,9 @@ def _mock_trailing_metadata(): del exception.request_id return exception - def _get_mock_response(self, failed=False): + def _get_mock_response(self, failed=False, streaming=False): """Generates a mock response object for use in tests. - Accepts a "failed" param that tells the returned mocked response to - mimic a failed response. - Returns: A Mock instance with mock "exception" and "trailing_metadata" methods @@ -189,6 +189,8 @@ def _get_mock_response(self, failed=False): Args: failed: a bool indicating whether the mock response should be in a failed state or not. Default is False. + streaming: a bool indicating whether the mock response should + represent a streaming response. 
""" def mock_exception_fn(): @@ -197,11 +199,21 @@ def mock_exception_fn(): return None def mock_result_fn(): + if streaming: + response = self._MOCK_STREAM + return response return self._MOCK_RESPONSE_MSG + def mock_get_cache(): + if failed: + return SimpleNamespace(**{"initial_response_object": None}) + else: + return SimpleNamespace(**{"initial_response_object": self._MOCK_STREAM}) + mock_response = mock.Mock() mock_response.exception = mock_exception_fn mock_response.trailing_metadata = self._get_trailing_metadata_fn() + mock_response.get_cache = mock_get_cache mock_response.result = mock_result_fn return mock_response @@ -328,13 +340,14 @@ def test_intercept_unary_stream_successful_request(self): """ mock_client_call_details = self._get_mock_client_call_details() mock_request = self._get_mock_request() - mock_response = self._get_mock_response() + mock_response = self._get_mock_response(streaming=True) mock_trailing_metadata = mock_response.trailing_metadata() def mock_add_done_callback(fn): fn(mock_response) mock_response.add_done_callback = mock_add_done_callback + mock_continuation_fn = mock.Mock(return_value=mock_response) with mock.patch("logging.config.dictConfig"), mock.patch( @@ -345,6 +358,7 @@ def mock_add_done_callback(fn): mock_continuation_fn, mock_client_call_details, mock_request ) + mock_logger.info.assert_called() mock_logger.info.assert_called_once_with( interceptor._SUMMARY_LOG_LINE.format( self._MOCK_CUSTOMER_ID, @@ -363,6 +377,10 @@ def mock_add_done_callback(fn): mock_trailing_metadata ) + # Assert that the cache is added to the interceptor + self.assertIsInstance(interceptor._cache, SimpleNamespace) + self.assertEqual(interceptor._cache.initial_response_object, self._MOCK_STREAM) + mock_logger.debug.assert_called_once_with( interceptor._FULL_REQUEST_LOG_LINE.format( self._MOCK_METHOD, @@ -429,7 +447,7 @@ def test_intercept_unary_stream_failed_request(self): LoggingInterceptor should call _logger.warning and _logger.info with a specific str parameter when a request fails. 
""" - mock_response = self._get_mock_response(failed=True) + mock_response = self._get_mock_response(failed=True, streaming=True) def mock_add_done_callback(fn): fn(mock_response) @@ -467,6 +485,10 @@ def mock_add_done_callback(fn): mock_trailing_metadata ) + # Assert that the cache is added to the interceptor + self.assertIsInstance(interceptor._cache, SimpleNamespace) + self.assertEqual(interceptor._cache.initial_response_object, None) + mock_logger.info.assert_called_once_with( interceptor._FULL_FAULT_LOG_LINE.format( self._MOCK_METHOD, @@ -645,7 +667,7 @@ def test_get_customer_id_from_invalid_resource_name(self): def test_copy_message(self): """Creates a copy of the given message.""" message = customer_user_access.CustomerUserAccess() - copy = interceptor_module._copy_message(message) + copy = _copy_message(message) self.assertIsInstance(copy, message.__class__) self.assertIsNot(message, copy) @@ -654,7 +676,7 @@ def test_mask_message_fields(self): message = customer_user_access.CustomerUserAccess() message.email_address = "test@test.com" message.inviter_user_email_address = "inviter@test.com" - copy = interceptor_module._mask_message_fields( + copy = _mask_message_fields( ["email_address", "inviter_user_email_address"], message, "REDACTED" ) self.assertIsInstance(copy, message.__class__) @@ -672,7 +694,7 @@ def test_mask_message_fields_nested(self): ) ) ) - copy = interceptor_module._mask_message_fields( + copy = _mask_message_fields( [ "operation.update.email_address", "operation.update.inviter_user_email_address", @@ -690,7 +712,7 @@ def test_mask_message_fields_nested(self): def test_mask_message_fields_unset_field(self): """Field is not masked if it is not set.""" message = customer_user_access.CustomerUserAccess() - copy = interceptor_module._mask_message_fields( + copy = _mask_message_fields( ["email_address"], message, "REDACTED" ) self.assertFalse("email_address" in copy) @@ -698,7 +720,7 @@ def test_mask_message_fields_unset_field(self): def test_mask_message_fields_unset_nested(self): """Nested field is not masked if it is not set.""" message = customer_user_access_service.MutateCustomerUserAccessRequest() - copy = interceptor_module._mask_message_fields( + copy = _mask_message_fields( [ "operation.update.email_address", "operation.update.inviter_user_email_address", @@ -712,7 +734,7 @@ def test_mask_message_fields_unset_nested(self): def test_mask_message_fields_bad_field_name(self): """No error is raised if a given field is not defined on the message.""" message = customer_user_access.CustomerUserAccess() - copy = interceptor_module._mask_message_fields( + copy = _mask_message_fields( ["bad_name"], message, "REDACTED" ) self.assertFalse("email_address" in copy) @@ -721,7 +743,7 @@ def test_mask_message_fields_bad_field_name(self): def test_mask_message_fields_bad_nested_field_name(self): """No error is raised if a nested field is not defined.""" message = customer_user_access.CustomerUserAccess() - copy = interceptor_module._mask_message_fields( + copy = _mask_message_fields( ["bad_name.another_bad.yet_another"], message, "REDACTED" ) self.assertFalse("email_address" in copy) @@ -736,7 +758,7 @@ def test_mask_search_google_ads_response(self): ) ) response.results.append(row) - copy = interceptor_module._mask_message(response, "REDACTED") + copy = mask_message(response, "REDACTED") self.assertIsInstance(copy, response.__class__) self.assertIsNot(response, copy) self.assertEqual( @@ -753,7 +775,7 @@ def test_mask_search_google_ads_response_protobuf(self): ) 
response.results.append(row) protobuf = util.convert_proto_plus_to_protobuf(response) - copy = interceptor_module._mask_message(protobuf, "REDACTED") + copy = mask_message(protobuf, "REDACTED") self.assertIsInstance(copy, protobuf.__class__) self.assertIsNot(protobuf, copy) self.assertEqual( @@ -769,7 +791,7 @@ def test_mask_search_google_ads_stream_response(self): ) ) response.results.append(row) - copy = interceptor_module._mask_message(response, "REDACTED") + copy = mask_message(response, "REDACTED") self.assertIsInstance(copy, response.__class__) self.assertIsNot(response, copy) self.assertEqual( @@ -783,7 +805,7 @@ def test_mask_search_change_event(self): change_event=change_event.ChangeEvent(user_email="test@test.com") ) response.results.append(row) - copy = interceptor_module._mask_message(response, "REDACTED") + copy = mask_message(response, "REDACTED") self.assertEqual(copy.results[0].change_event.user_email, "REDACTED") def test_mask_search_places_location_feed_data(self): @@ -797,7 +819,7 @@ def test_mask_search_places_location_feed_data(self): ) ) response.results.append(row) - copy = interceptor_module._mask_message(response, "REDACTED") + copy = mask_message(response, "REDACTED") self.assertEqual( copy.results[0].feed.places_location_feed_data.email_address, "REDACTED", @@ -809,7 +831,7 @@ def test_mask_customer_user_access(self): email_address="test@test.com", inviter_user_email_address="inviter@test.com", ) - copy = interceptor_module._mask_message( + copy = mask_message( customer_user_access_obj, "REDACTED" ) self.assertIsInstance(copy, customer_user_access_obj.__class__) @@ -827,7 +849,7 @@ def test_mask_mutate_customer_user_access_request(self): ) ) ) - copy = interceptor_module._mask_message(request, "REDACTED") + copy = mask_message(request, "REDACTED") self.assertIsInstance(copy, request.__class__) self.assertIsNot(copy, request) self.assertEqual(copy.operation.update.email_address, "REDACTED") @@ -840,7 +862,7 @@ def test_mask_create_customer_client_request(self): request = customer_service.CreateCustomerClientRequest( email_address="test@test.com", ) - copy = interceptor_module._mask_message(request, "REDACTED") + copy = mask_message(request, "REDACTED") self.assertIsInstance(copy, request.__class__) self.assertIsNot(copy, request) self.assertEqual(copy.email_address, "REDACTED") @@ -852,7 +874,7 @@ def test_mask_places_location_feed_data(self): email_address="test@test.com" ) ) - copy = interceptor_module._mask_message(message, "REDACTED") + copy = mask_message(message, "REDACTED") self.assertIsInstance(copy, message.__class__) self.assertIsNot(copy, message) self.assertEqual( @@ -864,6 +886,6 @@ def test_mask_customer_user_access_invitation_email(self): message = customer_user_access_invitation.CustomerUserAccessInvitation( email_address="test@test.com" ) - copy = interceptor_module._mask_message(message, "REDACTED") + copy = mask_message(message, "REDACTED") self.assertIsInstance(copy, message.__class__) self.assertEqual(copy.email_address, "REDACTED")
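The streaming cache that the tests above exercise through mocks can also be checked directly against _UnaryStreamWrapper. A self-contained sketch, assuming plain strings as stand-ins for streamed response chunks and a hypothetical test class name:

from types import SimpleNamespace
from unittest import TestCase, mock

from google.ads.googleads.interceptors.response_wrappers import (
    _UnaryStreamWrapper,
)


class UnaryStreamWrapperCacheTest(TestCase):
    def test_only_first_chunk_is_cached(self):
        # The underlying call yields two chunks; only the first should be
        # stored in the wrapper's cache for later logging.
        underlay_call = mock.MagicMock()
        underlay_call.__next__.side_effect = ["chunk-1", "chunk-2"]

        # use_proto_plus=True returns each chunk as-is, avoiding any
        # proto-plus to protobuf conversion of the string stand-ins.
        wrapper = _UnaryStreamWrapper(
            underlay_call, failure_handler=lambda e: None, use_proto_plus=True
        )

        self.assertEqual(next(wrapper), "chunk-1")
        self.assertEqual(next(wrapper), "chunk-2")

        cache = wrapper.get_cache()
        self.assertIsInstance(cache, SimpleNamespace)
        self.assertEqual(cache.initial_response_object, "chunk-1")

This mirrors the cache assertions added to logging_interceptor_test.py, but runs against the real wrapper rather than a mocked get_cache.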