Hi,
I'm getting an internal server error when retrieving Local Services leads.
Only 15 ad accounts were downloaded before the error appeared. Could you please advise?
The error I got is:
*InternalServerError: 500 Internal error encountered. [type_url:
"type.googleapis.com/google.ads.googleads.v17.errors.GoogleAdsFailure"]*
*My code is: *"import json
import time
import sys
from google.ads.googleads.client import GoogleAdsClient
from google.ads.googleads.v17 import services
from google.ads.googleads.errors import GoogleAdsException
import boto3
import datetime
from google.oauth2.credentials import Credentials
from google.auth.transport.requests import Request
from botocore.exceptions import ClientError
import pandas as pd
import awswrangler as wr
import requests
import re
# Maps the boolean `local_services_lead.lead_charged` API field to the
# label used in the exported report.
get_charge_status = {
    flag: ('CHARGED' if flag else 'NOT_CHARGED') for flag in (False, True)
}
class LeadProcessor:
    """Manages the Glue/Athena table lifecycle and incremental Parquet
    writes to S3 for Local Services lead snapshots.

    On construction the existing catalog table (if any) is dropped so each
    run starts from a clean table definition.
    """

    # Glue database holding the lead table.
    # NOTE(review): the original code deleted from 'fb' but wrote to 'db';
    # a table written to a different database than the one checked by
    # delete_table() can never be cleaned up. Unified on 'fb' here —
    # confirm which database is actually intended.
    DATABASE = 'fb'

    def __init__(self, catalog_curr_table, s3_path=None):
        """
        Args:
            catalog_curr_table: Name of the Glue/Athena table to (re)create.
            s3_path: Destination ``s3://`` prefix for Parquet output.
                Defaults to a per-run folder under the ``name`` bucket.
                The original code referenced an undefined ``path`` variable
                in save_s3 (NameError on every call) — confirm the intended
                destination prefix.
        """
        self.catalog_curr_table = catalog_curr_table
        self.new_folder = int(time.time())  # per-run folder id
        self.snapshot_date = datetime.datetime.now()
        self.s3 = boto3.client('s3')
        self.s3_path = s3_path or f"s3://name/{catalog_curr_table}/{self.new_folder}/"
        # Delete the existing Athena table if it exists
        self.delete_table()

    def delete_table(self):
        """Delete the Athena table if it exists.

        Raises:
            Exception: re-raises any awswrangler/Glue failure after logging.
        """
        try:
            table_deleted = wr.catalog.delete_table_if_exists(
                database=self.DATABASE,
                table=self.catalog_curr_table,
            )
            if table_deleted:
                print(f"Table {self.catalog_curr_table} exists and was deleted.")
            else:
                print(f"Table {self.catalog_curr_table} does not exist. No deletion necessary.")
        except Exception as e:
            print(f"Failed to delete table {self.catalog_curr_table}: {e}")
            raise

    def save_s3(self, df, account_id, path=None):
        """Append ``df`` to S3 as Parquet and register it in the Glue catalog.

        Args:
            df: DataFrame of leads for one account.
            account_id: Account the rows belong to (used for logging only).
            path: Optional override of the destination ``s3://`` prefix;
                defaults to the prefix chosen at construction time.

        Raises:
            Exception: re-raises any write failure after logging.
        """
        try:
            wr.s3.to_parquet(
                df=df,
                # Original referenced an undefined module-level `path` here.
                path=path or self.s3_path,
                mode='append',
                dataset=True,
                database=self.DATABASE,  # was 'db'; see DATABASE note above
                table=self.catalog_curr_table,
                use_threads=True
            )
            print(f"Parquet saved into S3 folder for account {account_id}")
        except Exception as e:
            print(f"Failed to write parquet for account {account_id}.....")
            print("Error Details ==== ", e)
            raise
def get_leads(client, AccountID):
    """Fetch Local Services leads for one account via the Google Ads API.

    Returns a list with one entry per conversation row, each serialized as
    a ';'-joined string in the column order expected by the downstream
    DataFrame builder.
    """
    ga_service = client.get_service("GoogleAdsService")
    query = """
    SELECT
    local_services_lead.id, customer.id, customer.descriptive_name,
    local_services_lead_conversation.event_date_time,
    local_services_lead.lead_type,
    local_services_lead.category_id,
    local_services_lead.lead_charged,
    local_services_lead.lead_status,
    local_services_lead_conversation.phone_call_details.call_duration_millis,
    local_services_lead.contact_details,
    local_services_lead_conversation.message_details.text
    FROM local_services_lead_conversation
    """
    leads = []
    for batch in ga_service.search_stream(customer_id=AccountID, query=query):
        for row in batch.results:
            lead = row.local_services_lead
            convo = row.local_services_lead_conversation
            # Flatten the contact-details repr to one line, unwrap quoted
            # phone numbers, then double any remaining quotes for CSV-style
            # embedding downstream.
            contact = str(lead.contact_details).replace("\n", "").replace("\r", "")
            contact = re.sub(r'"\+([0-9]+)"', r'+\1', contact).replace('"', '""')
            parts = (
                lead.id,
                row.customer.id,
                row.customer.descriptive_name,
                convo.event_date_time,
                lead.lead_type,
                lead.category_id,
                get_charge_status.get(lead.lead_charged),
                convo.phone_call_details.call_duration_millis,
                contact,
            )
            leads.append(";".join(str(p) for p in parts))
    return leads
# --- Script entry point -------------------------------------------------
# Downloads the google-ads.yaml credentials file from S3, builds a Google
# Ads client, then pulls Local Services leads per account and appends them
# incrementally to S3 via the LeadProcessor.

# Initialize Google Ads Client
BUCKET_NAME = 'name'
FILE_KEY = 'google-ads.yaml'
LOCAL_PATH = '/tmp/google-ads.yaml'
s3 = boto3.client('s3')
s3.download_file(BUCKET_NAME, FILE_KEY, LOCAL_PATH)
googleads_client = GoogleAdsClient.load_from_storage(LOCAL_PATH)

# Initialize the LeadProcessor (constructor drops any pre-existing table).
catalog_curr_table = 'test_leads_relation2'
lead_processor = LeadProcessor(catalog_curr_table=catalog_curr_table)

# Account IDs (replace with actual account list from your source)
account_ids = ['101295149', '1012979603', '1017182690']  # Use 100 accounts max for now

all_leads = []  # NOTE(review): never appended to below — appears unused
processed_count = 0  # Track how many accounts have been processed

# Process each client and store leads incrementally to S3
for accountID in account_ids:
    print(f"Processing client: {accountID}")
    # Fetch leads for the current account; get_leads returns ';'-joined rows.
    response = get_leads(googleads_client, str(accountID))
    # Convert collected leads to DataFrame
    df = pd.DataFrame([x.split(';') for x in response], columns=[
        'lead_id', 'account_id', 'business_name',
        'lead_creation_timestamp', 'lead_type',
        'lead_category', 'charge_status',
        # NOTE(review): source field is call_duration_millis — confirm units
        'phone_lead_charged_connected_call_duration_seconds',
        'phone_lead_consumer_phone_number'
    ])
    # Convert 'lead_creation_timestamp' to datetime
    df['lead_creation_timestamp'] = pd.to_datetime(df['lead_creation_timestamp'])
    # Add 'snapshot_date' and 'date_id' fields
    df['snapshot_date'] = lead_processor.snapshot_date  # Use snapshot date from LeadProcessor class
    df['date_id'] = df['lead_creation_timestamp'].dt.strftime('%Y%m%d')  # Format as YYYYMMDD
    df['date_id'] = df['date_id'].astype(int)  # Convert to integer
    # Save the current account leads to S3 incrementally
    if df.empty:
        print("DataFrame is empty. Skipping S3 append.")
        time.sleep(3)
    else:
        lead_processor.save_s3(df, accountID)
        processed_count += 1
        # Add a 5-second delay after processing 10 accounts
        if processed_count % 10 == 0:
            print("Processed 10 accounts, sleeping for 5 seconds...")
            time.sleep(5)

print("All accounts processed and leads saved to S3.")
--
--
=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~
Also find us on our blog:
https://googleadsdeveloper.blogspot.com/
=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~
You received this message because you are subscribed to the Google
Groups "AdWords API and Google Ads API Forum" group.
To post to this group, send email to [email protected]
To unsubscribe from this group, send email to
[email protected]
For more options, visit this group at
http://groups.google.com/group/adwords-api?hl=en
---
You received this message because you are subscribed to the Google Groups
"Google Ads API and AdWords API Forum" group.
To unsubscribe from this group and stop receiving emails from it, send an email
to [email protected].
To view this discussion on the web visit
https://groups.google.com/d/msgid/adwords-api/CAPDiXFq8Eb8bC0%3Dx9zgN%3DWKLaewQv3Y%3DDJMEph-%2BSKJL%2BkQacw%40mail.gmail.com.