Hello,
I'm trying to use the BatchJob service in Python.
I'm creating ad groups with ~100,000 keywords in total, and it takes a lot of time (sometimes it also gets stuck).
I have read the code in the api documentation.
My questions are:
1. Is there an efficient way to create these batch jobs?
2. Is there a limit on the number of operations?
3. How can we improve it (best practices)?
This is my code (taken from the internet):
def run_operations_as_batch_job(account_id, wait_for_response=False, *operations):
    """Upload *operations* as an AdWords batch job for *account_id*.

    Args:
        account_id: the AdWords account to run the job under.
        wait_for_response: when True, block until the job finishes and
            return the parsed results; otherwise return immediately.
        *operations: the batch-job operation lists to upload.

    Returns:
        The parsed batch-job response when wait_for_response is True,
        otherwise the batch job id.

    Raises:
        Exception: propagated from GetBatchJobDownloadUrlWhenReady when the
            job does not finish within its bounded polling window. (The
            original wrapped the wait in a bare ``except: pass`` retry loop,
            which swallowed every error — including the poller's timeout —
            and could spin forever; the poller already retries with
            exponential backoff, so errors are now surfaced to the caller.)
    """
    set_adwords_client(account_id)
    job_helper = adwords_client.GetBatchJobHelper()
    job = create_batch_job()
    upload_url = job['uploadUrl']['url']
    batch_job_id = job['id']
    job_helper.UploadOperations(upload_url, *operations)
    if wait_for_response:
        # Give the service a moment to register the upload before polling.
        sleep(5)
        url = GetBatchJobDownloadUrlWhenReady(batch_job_id)
        response = urllib2.urlopen(url)
        try:
            return job_helper.ParseResponse(response.read())
        finally:
            # The original leaked the HTTP response object.
            response.close()
    return batch_job_id
def GetBatchJobDownloadUrlWhenReady(batch_job_id, max_poll_attempts=10):
    """Poll a batch job until its download URL is available.

    Polls BatchJobService with exponential backoff (30s, 60s, 120s, ... plus
    up to 10 seconds of random jitter) while the job is still in a pending
    state, for at most *max_poll_attempts* attempts.

    Args:
        batch_job_id: id of the batch job to poll.
        max_poll_attempts: maximum number of polls before giving up.

    Returns:
        The job's download URL string.

    Raises:
        Exception: if the URL is not available after max_poll_attempts polls,
            or the job left the pending states without a download URL.
    """
    batch_job = get_batch_job(batch_job_id)
    for poll_attempt in range(max_poll_attempts):
        if batch_job['status'] not in ('DONE', 'ACTIVE', 'AWAITING_FILE', 'CANCELING'):
            break
        if 'downloadUrl' in batch_job:
            url = batch_job['downloadUrl']['url']
            print('Batch Job with Id "%s", Status "%s", and DownloadUrl "%s" ready.'
                  % (batch_job['id'], batch_job['status'], url))
            return url
        # Float division keeps the sub-second jitter; the original's
        # "/ 1000" truncated to whole seconds under Python 2.
        sleep_interval = (30 * (2 ** poll_attempt) +
                          (random.randint(0, 10000) / 1000.0))
        # print() call form works on both Python 2 and 3 for a single
        # argument; the original mixed a Py2 print statement with call form.
        print('Batch Job not ready, sleeping for %s seconds.' % sleep_interval)
        sleep(sleep_interval)
        batch_job = get_batch_job(batch_job_id)
    raise Exception('Batch Job not finished downloading. Try checking later.')
def get_batch_job(batch_job_id):
    """Fetch the BatchJob entry with the given id via BatchJobService."""
    service = get_service(Services.BATCH_JOB_SERVICE)
    id_predicate = {
        'field': 'Id',
        'operator': 'EQUALS',
        'values': [batch_job_id],
    }
    page = service.get({
        'fields': ['Id', 'Status', 'DownloadUrl'],
        'predicates': [id_predicate],
    })
    return page['entries'][0]