I am using the Google Ads API for keyword research on my website, but it takes too long.
Simply getting the data from the Google database takes about 6 seconds per call. I have removed everything that modifies the data, so I am sure the 6 seconds is spent just retrieving it.
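For reference, this is roughly how the 6 seconds can be measured in isolation (a minimal sketch: it assumes the same googleads.yaml credentials and a selector dict built exactly as in api_call_function below, and it times only the get() round trip):

import time
from googleads import adwords

# Time only the TargetingIdeaService.get() round trip.
# 'selector' is assumed to be the same dict built in api_call_function() below.
client = adwords.AdWordsClient.LoadFromStorage('googleads.yaml')
service = client.GetService('TargetingIdeaService', version='v201809')

start = time.perf_counter()
page = service.get(selector)
print("get() took", round(time.perf_counter() - start, 2), "seconds,",
      len(page['entries']), "entries returned")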
This is the code for getting the keyword volume, competition, and ideas:
import csv
import time
import suds
import re
from googleads import adwords
import os
class Ads(object):
    def __init__(self, keywords):
        self.num_calls = 0
        self.langID = 1000
        self.locID = 2840
        self.adwords_client = adwords.AdWordsClient.LoadFromStorage('googleads.yaml')
        self.before_estimate = keywords
        # set up queries for search volume (batches of 200 keywords per call)
        list_of_calls = [self.before_estimate[i:i + 200] for i in range(0, len(self.before_estimate), 200)]
        self.num_calls = len(list_of_calls)
        print("Get ready for", self.num_calls, "calls and approximately", self.num_calls * 50, "seconds.")
        self.results = self.get_estimate_search_volume(list_of_calls)

    def api_call_function(self, keyword_list, langID, locID, count, requestType):
        targeting_idea_service = self.adwords_client.GetService('TargetingIdeaService', version='v201809')
        offset = 0
        PAGE_SIZE = 100
        selector = {
            'searchParameters': [
                {
                    'xsi_type': 'RelatedToQuerySearchParameter',
                    'queries': keyword_list
                },
                {
                    'xsi_type': 'LanguageSearchParameter',
                    'languages': [{'id': langID}]
                },
                {
                    'xsi_type': 'LocationSearchParameter',
                    'locations': [{'id': locID}]
                },
                {
                    # Network search parameter (optional)
                    'xsi_type': 'NetworkSearchParameter',
                    'networkSetting': {
                        'targetGoogleSearch': True,
                        'targetSearchNetwork': False,
                        'targetContentNetwork': False,
                        'targetPartnerSearchNetwork': False
                    }
                },
                {
                    'xsi_type': 'IdeaTextFilterSearchParameter',
                    'included': keyword_list
                }
            ],
            'ideaType': 'KEYWORD',
            'requestType': requestType,
            'requestedAttributeTypes': ['KEYWORD_TEXT', 'SEARCH_VOLUME', 'COMPETITION'],
            'paging': {
                'startIndex': str(offset),
                'numberResults': count
            }
        }
        page = targeting_idea_service.get(selector)
        # Create dictionary with keyword -> [search volume, competition]
        all_results = {}
        for result in page['entries']:
            for attribute in result['data']:
                if attribute['key'] == "KEYWORD_TEXT":
                    word = attribute['value']['value']
                if attribute['key'] == "SEARCH_VOLUME":
                    volume = attribute['value']['value']
                if attribute['key'] == "COMPETITION":
                    competition = attribute['value']['value']
            # Skip entries with no competition data
            if competition == 0.0 or competition is None:
                pass
            else:
                all_results[word] = [volume, competition]
        return all_results

    def api_call_error_handling(self, errors, current_call, i, keyword_file="keywords.txt"):
        # keyword_file: name of the source keyword file (placeholder default;
        # it was not defined anywhere in this snippet)
        def save_and_delete_keywords(keyword_file, error_lines):
            # delete bad keywords and log those deleted
            deleted_keyword_file = "deleted_" + keyword_file
            deleted_list = []
            with open(deleted_keyword_file, "a") as d_file:
                for bad_words in error_lines:
                    deleted = current_call.pop(bad_words)
                    deleted_list.append(deleted)
                for deleted in deleted_list:
                    d_file.write(deleted + "\n")

        errors = str(errors)
        # log errors to file
        with open("duplicate_error.txt", "a") as d_file:
            entry = "ERROR - CALL # " + str(i + 1) + "\n"
            d_file.write(entry)
            d_file.write(errors + "\n")
        # get the offending line numbers from the error text
        error_lines = re.findall(r'\d+', errors)
        # drop single-digit matches and convert the rest to ints
        error_lines[:] = [int(x) for x in error_lines if len(x) != 1]
        error_lines.sort()
        error_lines.reverse()
        print(error_lines)
        save_and_delete_keywords(keyword_file, error_lines)
        return current_call

    def get_estimate_search_volume(self, list_of_calls):
        # Run the API call for each batch and merge the results into one dictionary
        results = {}
        i = 0
        retry = False
        errors = "none"
        while i < self.num_calls:
            if not retry:
                current_call = list_of_calls[i]
            try:
                call_results = self.api_call_function(current_call, self.langID, self.locID, 800, 'IDEAS')
                call_results.update(self.api_call_function(current_call, self.langID, self.locID, 800, 'STATS'))
                print("CALL #", i + 1, "COMPLETED.")
                results.update(call_results)
                i += 1
                # time.sleep(10)
                retry = False
            except suds.WebFault as e:
                errors = e
                print("ERROR LOGGED AS ERROR_LIST", e)
                current_call = self.api_call_error_handling(errors, current_call, i)
                # time.sleep(35)
                retry = True
                print("LETS TRY THIS ONE AGAIN. CALL #", i + 1)
        return results

    def write_result_to_CSV(self, results, filename):
        # write result to a new CSV file, swapping the .txt extension for .csv
        filename = os.path.splitext(filename)[0] + ".csv"
        with open(filename, "w") as output_file:
            output_file.write("KEYWORD,MONTHLY_SEARCH_VOLUME,COMPETITION,LANG_ID,LOC_ID\n")
            for key in results:
                word = key
                volume = results[key][0]
                competition = results[key][1]
                # Write one row per keyword
                entry = word + ', ' + str(volume) + ', ' + str(competition) + ', ' + str(self.langID) + ', ' + str(self.locID) + '\n'
                output_file.write(entry)

    def __str__(self):
        outputData = '\n'
        for i in range(len(self.results)):
            res_list = list(self.results.values())
            outputData += str(list(self.results.keys())[i]) + '\n'
            outputData += 'Volume: ' + str(res_list[i][0]) + '\n'
            outputData += 'Competition: ' + str(res_list[i][1]) + '\n'
            outputData += '\n'
        return outputData

    def data(self):
        return self.results

print(Ads(['put keyword here']).data())