
Scrape Google Search Results

Build a SERP monitoring script that tracks keyword rankings and extracts organic results using the Autom API.

Overview

In this playbook you will build a script that queries Google Search and extracts structured organic results — positions, titles, URLs, and snippets — for any keyword. A typical use case is rank tracking: run this on a schedule to monitor where your pages appear for target keywords.

The endpoint returns up to 10 organic results per page and supports pagination as well as country (gl) and language (hl) targeting.

Prerequisites

  • An Autom API key — get one at app.autom.dev
  • Install dependencies for your language:
Python: pip install requests

Node.js: no extra dependencies — uses the native fetch API (Node 18+).

PHP: curl extension enabled (on by default in most PHP installs).

Go: no extra dependencies — uses the net/http standard library (Go 1.18+).

Java: no extra dependencies — uses java.net.http (Java 11+).

C#: no extra dependencies — uses System.Net.Http (.NET 6+).

Rust: add the following to Cargo.toml:

[dependencies]
reqwest = { version = "0.12", features = ["json"] }
tokio = { version = "1", features = ["full"] }
serde_json = "1"

Steps

Make your first search request

Call GET /v1/google/search with the q parameter and your API key in the x-api-key header.

Python

import requests

API_KEY = "YOUR_API_KEY"

response = requests.get(
    "https://api.autom.dev/v1/google/search",
    headers={"x-api-key": API_KEY},
    params={"q": "best python web scraping libraries", "gl": "us", "hl": "en"},
)

data = response.json()
print(data)

Node.js

const API_KEY = "YOUR_API_KEY";

const params = new URLSearchParams({
  q: "best python web scraping libraries",
  gl: "us",
  hl: "en",
});

const response = await fetch(
  `https://api.autom.dev/v1/google/search?${params}`,
  { headers: { "x-api-key": API_KEY } },
);

const data = await response.json();
console.log(data);

PHP

<?php
$apiKey = "YOUR_API_KEY";

$params = http_build_query([
  "q"  => "best python web scraping libraries",
  "gl" => "us",
  "hl" => "en",
]);

$ch = curl_init("https://api.autom.dev/v1/google/search?{$params}");
curl_setopt($ch, CURLOPT_RETURNTRANSFER, true);
curl_setopt($ch, CURLOPT_HTTPHEADER, ["x-api-key: {$apiKey}"]);

$data = json_decode(curl_exec($ch), true);
curl_close($ch);
print_r($data);

Go

package main

import (
    "encoding/json"
    "fmt"
    "io"
    "net/http"
    "net/url"
)

func main() {
    apiKey := "YOUR_API_KEY"

    params := url.Values{}
    params.Set("q",  "best python web scraping libraries")
    params.Set("gl", "us")
    params.Set("hl", "en")

    req, _ := http.NewRequest("GET",
        "https://api.autom.dev/v1/google/search?"+params.Encode(), nil)
    req.Header.Set("x-api-key", apiKey)

    resp, _ := http.DefaultClient.Do(req)
    defer resp.Body.Close()

    body, _ := io.ReadAll(resp.Body)
    var data map[string]any
    json.Unmarshal(body, &data)
    fmt.Println(data)
}

Java

import java.net.URI;
import java.net.URLEncoder;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.nio.charset.StandardCharsets;

public class Main {
    public static void main(String[] args) throws Exception {
        var apiKey = "YOUR_API_KEY";
        var q      = URLEncoder.encode("best python web scraping libraries", StandardCharsets.UTF_8);
        var url    = "https://api.autom.dev/v1/google/search?q=" + q + "&gl=us&hl=en";

        var client  = HttpClient.newHttpClient();
        var request = HttpRequest.newBuilder()
            .uri(URI.create(url))
            .header("x-api-key", apiKey)
            .GET()
            .build();

        var response = client.send(request, HttpResponse.BodyHandlers.ofString());
        System.out.println(response.body());
    }
}

C#

using System.Net.Http;

var apiKey = "YOUR_API_KEY";
using var client = new HttpClient();
client.DefaultRequestHeaders.Add("x-api-key", apiKey);

var url      = "https://api.autom.dev/v1/google/search?q=best+python+web+scraping+libraries&gl=us&hl=en";
var response = await client.GetAsync(url);
var body     = await response.Content.ReadAsStringAsync();
Console.WriteLine(body);

Rust

#[tokio::main]
async fn main() -> Result<(), reqwest::Error> {
    let client = reqwest::Client::new();

    let response = client
        .get("https://api.autom.dev/v1/google/search")
        .header("x-api-key", "YOUR_API_KEY")
        .query(&[("q", "best python web scraping libraries"), ("gl", "us"), ("hl", "en")])
        .send()
        .await?
        .json::<serde_json::Value>()
        .await?;

    println!("{:#?}", response);
    Ok(())
}

Parse the organic results

The response contains an organic_results array. Each entry has position, title, link, snippet, domain, and source.
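
To make the parsing code concrete, here is a minimal sketch (as a Python literal) of the response shape it assumes. The field names come from the description above and from the pagination step later in this playbook; the values are placeholders, and real responses may include additional fields.

# Illustrative shape only; values are placeholders, not real data.
data = {
    "organic_results": [
        {
            "position": 1,
            "title": "...",
            "link": "https://...",
            "snippet": "...",
            "domain": "...",
            "source": "...",
        },
        # ... up to 10 entries per page
    ],
    "pagination": {
        "has_next_page": True,
        # ... plus the next page number (see the pagination step below)
    },
}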

Python

for result in data["organic_results"]:
    print(f"[{result['position']}] {result['title']}")
    print(f"    URL: {result['link']}")
    print(f"    {result.get('snippet', '')}")
    print()

Node.js

for (const result of data.organic_results) {
  console.log(`[${result.position}] ${result.title}`);
  console.log(`    URL: ${result.link}`);
  console.log(`    ${result.snippet ?? ""}`);
  console.log();
}

PHP

foreach ($data["organic_results"] as $result) {
    echo "[{$result['position']}] {$result['title']}\n";
    echo "    URL: {$result['link']}\n";
    echo "    " . ($result['snippet'] ?? "") . "\n\n";
}

Go

// Guard the type assertion in case organic_results is missing.
if results, ok := data["organic_results"].([]any); ok {
    for _, r := range results {
        item := r.(map[string]any)
        fmt.Printf("[%.0f] %s\n", item["position"], item["title"])
        fmt.Printf("    URL: %s\n", item["link"])
        fmt.Printf("    %s\n\n", item["snippet"])
    }
}

Java

import org.json.JSONArray;
import org.json.JSONObject;

// Add org.json:json to your build tool, or parse manually
var json    = new JSONObject(response.body());
var results = json.getJSONArray("organic_results");

for (int i = 0; i < results.length(); i++) {
    var r = results.getJSONObject(i);
    System.out.printf("[%d] %s%n", r.getInt("position"), r.getString("title"));
    System.out.printf("    URL: %s%n%n", r.getString("link"));
}

C#

using System.Text.Json;

var json    = JsonDocument.Parse(body);
var results = json.RootElement.GetProperty("organic_results").EnumerateArray();

foreach (var result in results)
{
    Console.WriteLine($"[{result.GetProperty("position")}] {result.GetProperty("title")}");
    Console.WriteLine($"    URL: {result.GetProperty("link")}");
    Console.WriteLine();
}

Rust

if let Some(results) = response["organic_results"].as_array() {
    for result in results {
        println!(
            "[{}] {}",
            result["position"],
            result["title"].as_str().unwrap_or("")
        );
        println!("    URL: {}", result["link"].as_str().unwrap_or(""));
        println!("    {}", result["snippet"].as_str().unwrap_or(""));
        println!();
    }
}

Handle pagination

Use the page parameter to fetch subsequent pages. The response includes a pagination object with a has_next_page flag and the next page number.

Python

import requests

API_KEY = "YOUR_API_KEY"
QUERY   = "best python web scraping libraries"

def fetch_all_results(query: str, max_pages: int = 3) -> list:
    all_results = []
    page = 1

    while page <= max_pages:
        response = requests.get(
            "https://api.autom.dev/v1/google/search",
            headers={"x-api-key": API_KEY},
            params={"q": query, "gl": "us", "hl": "en", "page": page},
        )
        data = response.json()
        all_results.extend(data.get("organic_results", []))

        if not data.get("pagination", {}).get("has_next_page"):
            break
        page += 1

    return all_results

results = fetch_all_results(QUERY)
for r in results:
    print(f"[{r['position']}] {r['title']} — {r['link']}")

Node.js

const API_KEY = "YOUR_API_KEY";

async function fetchAllResults(query: string, maxPages = 3) {
  const allResults: any[] = [];
  let page = 1;

  while (page <= maxPages) {
    const params = new URLSearchParams({ q: query, gl: "us", hl: "en", page: String(page) });
    const res = await fetch(`https://api.autom.dev/v1/google/search?${params}`, {
      headers: { "x-api-key": API_KEY },
    });
    const data = await res.json();
    allResults.push(...(data.organic_results ?? []));

    if (!data.pagination?.has_next_page) break;
    page++;
  }

  return allResults;
}

const results = await fetchAllResults("best python web scraping libraries");
for (const r of results) {
  console.log(`[${r.position}] ${r.title} — ${r.link}`);
}

PHP

<?php
function fetchAllResults(string $apiKey, string $query, int $maxPages = 3): array {
    $allResults = [];
    $page = 1;

    while ($page <= $maxPages) {
        $params = http_build_query(["q" => $query, "gl" => "us", "hl" => "en", "page" => $page]);
        $ch = curl_init("https://api.autom.dev/v1/google/search?{$params}");
        curl_setopt($ch, CURLOPT_RETURNTRANSFER, true);
        curl_setopt($ch, CURLOPT_HTTPHEADER, ["x-api-key: {$apiKey}"]);
        $data = json_decode(curl_exec($ch), true);
        curl_close($ch);

        $allResults = array_merge($allResults, $data["organic_results"] ?? []);
        if (!($data["pagination"]["has_next_page"] ?? false)) break;
        $page++;
    }
    return $allResults;
}

$results = fetchAllResults("YOUR_API_KEY", "best python web scraping libraries");
foreach ($results as $r) {
    echo "[{$r['position']}] {$r['title']} — {$r['link']}\n";
}

Go

package main

import (
    "encoding/json"
    "fmt"
    "io"
    "net/http"
    "net/url"
    "strconv"
)

func fetchAllResults(apiKey, query string, maxPages int) []map[string]any {
    var all []map[string]any

    for page := 1; page <= maxPages; page++ {
        params := url.Values{"q": {query}, "gl": {"us"}, "hl": {"en"}, "page": {strconv.Itoa(page)}}
        req, _ := http.NewRequest("GET", "https://api.autom.dev/v1/google/search?"+params.Encode(), nil)
        req.Header.Set("x-api-key", apiKey)

        resp, _ := http.DefaultClient.Do(req)
        body, _ := io.ReadAll(resp.Body)
        resp.Body.Close()

        var data map[string]any
        json.Unmarshal(body, &data)

        if items, ok := data["organic_results"].([]any); ok {
            for _, item := range items {
                all = append(all, item.(map[string]any))
            }
        }

        pagination, _ := data["pagination"].(map[string]any)
        if pagination["has_next_page"] != true {
            break
        }
    }
    return all
}

func main() {
    results := fetchAllResults("YOUR_API_KEY", "best python web scraping libraries", 3)
    for _, r := range results {
        fmt.Printf("[%.0f] %s — %s\n", r["position"], r["title"], r["link"])
    }
}

Java

import java.net.URI;
import java.net.URLEncoder;
import java.net.http.*;
import java.nio.charset.StandardCharsets;
import java.util.*;
import org.json.*;

public class Main {
    static HttpClient client = HttpClient.newHttpClient();
    static String API_KEY   = "YOUR_API_KEY";

    static List<JSONObject> fetchAllResults(String query, int maxPages) throws Exception {
        var all  = new ArrayList<JSONObject>();
        var q    = URLEncoder.encode(query, StandardCharsets.UTF_8);

        for (int page = 1; page <= maxPages; page++) {
            var url     = "https://api.autom.dev/v1/google/search?q=" + q + "&gl=us&hl=en&page=" + page;
            var request = HttpRequest.newBuilder().uri(URI.create(url))
                .header("x-api-key", API_KEY).GET().build();
            var resp    = client.send(request, HttpResponse.BodyHandlers.ofString());
            var json    = new JSONObject(resp.body());
            var results = json.optJSONArray("organic_results");

            if (results != null) {
                for (int i = 0; i < results.length(); i++) all.add(results.getJSONObject(i));
            }
            var pagination = json.optJSONObject("pagination");
            if (pagination == null || !pagination.optBoolean("has_next_page")) break;
        }
        return all;
    }

    public static void main(String[] args) throws Exception {
        for (var r : fetchAllResults("best python web scraping libraries", 3)) {
            System.out.printf("[%d] %s — %s%n", r.getInt("position"), r.getString("title"), r.getString("link"));
        }
    }
}

C#

using System.Net.Http;
using System.Text.Json;

var apiKey = "YOUR_API_KEY";
using var client = new HttpClient();
client.DefaultRequestHeaders.Add("x-api-key", apiKey);

var allResults = new List<JsonElement>();
int page = 1, maxPages = 3;

while (page <= maxPages)
{
    var url      = $"https://api.autom.dev/v1/google/search?q=best+python+web+scraping+libraries&gl=us&hl=en&page={page}";
    var response = await client.GetAsync(url);
    var body     = await response.Content.ReadAsStringAsync();
    var json     = JsonDocument.Parse(body).RootElement;

    // Guard against missing keys, matching the other languages' defensive style.
    if (json.TryGetProperty("organic_results", out var organic))
        foreach (var result in organic.EnumerateArray())
            allResults.Add(result);

    if (!json.TryGetProperty("pagination", out var pagination) ||
        !pagination.TryGetProperty("has_next_page", out var hasNext) ||
        !hasNext.GetBoolean())
        break;
    page++;
}

foreach (var r in allResults)
    Console.WriteLine($"[{r.GetProperty("position")}] {r.GetProperty("title")} — {r.GetProperty("link")}");

Rust

use reqwest::Client;
use serde_json::Value;

async fn fetch_all_results(client: &Client, api_key: &str, query: &str, max_pages: u32) -> Vec<Value> {
    let mut all_results = Vec::new();
    let mut page = 1u32;

    while page <= max_pages {
        let resp = client
            .get("https://api.autom.dev/v1/google/search")
            .header("x-api-key", api_key)
            .query(&[("q", query), ("gl", "us"), ("hl", "en"), ("page", &page.to_string())])
            .send().await.unwrap()
            .json::<Value>().await.unwrap();

        if let Some(results) = resp["organic_results"].as_array() {
            all_results.extend(results.clone());
        }
        if !resp["pagination"]["has_next_page"].as_bool().unwrap_or(false) { break; }
        page += 1;
    }
    all_results
}

#[tokio::main]
async fn main() -> Result<(), reqwest::Error> {
    let client  = Client::new();
    let results = fetch_all_results(&client, "YOUR_API_KEY", "best python web scraping libraries", 3).await;

    for r in &results {
        println!("[{}] {} — {}", r["position"], r["title"].as_str().unwrap_or(""), r["link"].as_str().unwrap_or(""));
    }
    Ok(())
}
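
With fetching and pagination in place, the rank-tracking use case from the overview takes only a few extra lines. Below is one possible Python sketch that reuses the fetch_all_results helper from the Python example above; TARGET_DOMAIN and the rankings.csv output path are illustrative choices, not part of the API.

import csv
import datetime

# Hypothetical example values; substitute your own keyword and domain.
KEYWORD       = "best python web scraping libraries"
TARGET_DOMAIN = "example.com"

results = fetch_all_results(KEYWORD)

# First organic result whose URL belongs to the tracked domain, if any.
rank = next(
    (r["position"] for r in results if TARGET_DOMAIN in r.get("link", "")),
    None,
)

# Append one row per run; schedule the script (e.g. with cron) to build a history.
with open("rankings.csv", "a", newline="") as f:
    csv.writer(f).writerow([datetime.date.today().isoformat(), KEYWORD, rank])

if rank:
    print(f"{KEYWORD!r}: position {rank}")
else:
    print(f"{KEYWORD!r}: not found in the fetched pages")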

Use the gl parameter to target a specific country (e.g. fr for France, de for Germany) and hl for the language of results. This is essential for accurate local rank tracking.
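
To compare local rankings in practice, you can run the same keyword across several markets and diff the output. A minimal Python sketch reusing the request pattern from the first step; the MARKETS list is just an example.

import requests

API_KEY = "YOUR_API_KEY"
QUERY   = "best python web scraping libraries"

# Example country/language pairs; adjust to the markets you track.
MARKETS = [("us", "en"), ("fr", "fr"), ("de", "de")]

for gl, hl in MARKETS:
    data = requests.get(
        "https://api.autom.dev/v1/google/search",
        headers={"x-api-key": API_KEY},
        params={"q": QUERY, "gl": gl, "hl": hl},
    ).json()

    results = data.get("organic_results", [])
    top = results[0] if results else {}
    print(f"{gl}/{hl}: #1 is {top.get('title')} ({top.get('link')})")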
