# Add the columns that will hold the DataForSEO results to `kwds`,
# initialised to NA so rows that still need fetching are easy to spot.
kwds[, c(
  "spell", "location_code", "language_code", "search_partners",
  "competition", "competition_index", "search_volume",
  "low_top_of_page_bid", "high_top_of_page_bid"
)] <- NA
# Fetch search-volume metrics from the DataForSEO API in batches of `pas`
# keywords per request, writing the results back into `kwds`.
# NOTE(review): the braces opened below are not all closed within this
# excerpt, and the handler-style comments near the end suggest a tryCatch()
# wrapper that is not visible here -- the loop continues past this chunk.
for(i in seq(from=1, to=nrow(kwds), by=pas)){ # for-loop over rows
# only request rows whose search_volume is still unset, so the script can
# resume after an interruption without re-querying the API
if(is.null(kwds[i,"search_volume"]) || is.na(kwds[i,"search_volume"])){
# building the list of kwd to request
# open the JSON payload with the first keyword of this batch
data <- paste0('[{"device":"all", "search_partners":false, "keywords":["', kwds[i,"Kwd"])
# append up to pas-1 further keywords to the same request
# NOTE(review): if pas == 1 this evaluates 1:0 and iterates twice
# (seq_len(pas - 1) would be the safe form) -- confirm pas > 1 upstream
for (idkwd in 1:(pas-1)) {
# skip past-the-end rows on the final (possibly short) batch
if(!is.null(kwds[i+idkwd,"Kwd"]) && !is.na(kwds[i+idkwd,"Kwd"])){
data <- paste0(data,'", "',kwds[i+idkwd,"Kwd"])
# close the keyword array and payload (location_code 2840, English)
# NOTE(review): as excerpted this line sits inside the inner loop, which
# would terminate the JSON after every appended keyword; presumably it
# belongs after the loop closes -- verify against the complete file
data <- paste0(data, '"], "location_code":2840, "language_code":"en", "sort_by":"search_volume"}]')
# send the batch to the live search_volume endpoint
res <- httr::POST(url = 'https://api.dataforseo.com/v3/keywords_data/google_ads/search_volume/live', httr::add_headers(.headers=headers), body = data)
# parse the JSON body of the response into nested lists/data frames
res_text <- httr::content(res, "text")
res_json <- jsonlite::fromJSON(res_text, flatten = TRUE)
# bind the per-keyword result columns into one data frame
batch <- as.data.frame(do.call(cbind, res_json[["tasks"]][["result"]][[1]]))
# coerce everything to character so assignment back into kwds is uniform
batch <- data.frame(lapply(batch, as.character), stringsAsFactors=FALSE)
# align the API's "keyword" column with our "Kwd" column name
data.table::setnames(batch, "keyword", "Kwd")
# drop the nested monthly breakdown; only the flat metrics are kept
batch$monthly_searches <- NULL
# inserting result inside our main data frame kwds
# NOTE(review): this whole-row assignment assumes batch's columns line up
# positionally with kwds' columns -- confirm the column order matches
kwds[match(batch$Kwd, kwds$Kwd), ] <- batch
message(i, " ",kwds[i,"Kwd"], " OK")
# make the script sleep between each request
# we don't want to go over the API hit rate limit
# NOTE(review): despite the comment above, no Sys.sleep() call is visible
# in this excerpt -- confirm the throttle exists in the full file
# save on the hard drive the results, for the paranoid
write.csv(kwds, "kwds.csv")
# Do this if an error is caught...
message(i, " ",kwds[i,"Kwd"], " error")
# Do this if a warning is caught...
message(i, " ",kwds[i,"Kwd"], " warning")
# Do this at the end before quitting the tryCatch structure...