Quickstart Guide
Get from zero to your first SERP data in under 5 minutes. This guide walks you through authentication, making your first request, and retrieving results.
Prerequisites
Before you begin, make sure you have:
- An API key (see Authentication for instructions)
- curl, Python 3.x, or Node.js installed on your machine
Getting Started
Set Up Authentication
All API requests require a Bearer token in the Authorization header.
Replace YOUR_API_KEY with your actual API key.
# Set your API key as an environment variable
export SERPWATCH_API_KEY="your_api_key_here"
# Test authentication by getting your user info
# (every endpoint on this API requires the same Authorization: Bearer header)
curl -X GET "https://engine.v2.serpwatch.io/api/v1/users/me" \
-H "Authorization: Bearer $SERPWATCH_API_KEY"
import os
import requests

# Read the key from the environment; fall back to a placeholder so the
# snippet is copy-paste runnable (replace with your real key).
API_KEY = os.environ.get("SERPWATCH_API_KEY", "your_api_key_here")
BASE_URL = "https://engine.v2.serpwatch.io"

# Create a session so every request automatically carries the auth headers.
session = requests.Session()
session.headers.update({
    "Authorization": f"Bearer {API_KEY}",
    "Content-Type": "application/json"
})

# Test authentication. raise_for_status() turns a 401/403 into a clear
# exception instead of silently printing the error body.
response = session.get(f"{BASE_URL}/api/v1/users/me")
response.raise_for_status()
print(response.json())
const API_KEY = process.env.SERPWATCH_API_KEY || "your_api_key_here";
const BASE_URL = "https://engine.v2.serpwatch.io";

/**
 * Helper function for API calls.
 * Note: fetch() resolves even on 4xx/5xx responses, so we check
 * response.ok and throw rather than silently parsing an error body.
 *
 * @param {string} endpoint - Path beginning with "/", appended to BASE_URL.
 * @param {object} [options] - Extra fetch options (method, body, headers...).
 * @returns {Promise<any>} Parsed JSON response body.
 * @throws {Error} When the HTTP status is not in the 2xx range.
 */
async function apiRequest(endpoint, options = {}) {
  const response = await fetch(`${BASE_URL}${endpoint}`, {
    ...options,
    headers: {
      "Authorization": `Bearer ${API_KEY}`,
      "Content-Type": "application/json",
      ...options.headers
    }
  });
  if (!response.ok) {
    throw new Error(`API request failed: ${response.status} ${response.statusText}`);
  }
  return response.json();
}

// Test authentication
const user = await apiRequest("/api/v1/users/me");
console.log(user);
Tip
Store your API key in an environment variable to keep it secure. Never commit API keys to version control.
Create a SERP Crawl Task
Submit a request to crawl Google search results for a keyword. The API returns a task ID immediately, and processing happens asynchronously.
# Submit a crawl task; the API queues it and returns a task ID immediately.
# depth          - presumably the number of results to crawl (matches the
#                  "depth": 10 echoed in the response) — see API reference
# iso_code / location_name / language_code - geo and language targeting
curl -X POST "https://engine.v2.serpwatch.io/api/v2/serp/crawl" \
-H "Authorization: Bearer $SERPWATCH_API_KEY" \
-H "Content-Type: application/json" \
-d '{
"keyword": "best project management software",
"depth": 10,
"device": "desktop",
"location_name": "United States",
"iso_code": "US",
"language_code": "en"
}'
# Create a SERP crawl task. The API queues it and responds immediately;
# the crawl itself runs asynchronously.
task_response = session.post(
    f"{BASE_URL}/api/v2/serp/crawl",
    json={
        "keyword": "best project management software",
        "depth": 10,
        "device": "desktop",
        "location_name": "United States",
        "iso_code": "US",
        "language_code": "en"
    }
)
# Fail fast on auth/validation errors instead of KeyError-ing on "id" below.
task_response.raise_for_status()
task = task_response.json()
task_id = task["id"]
print(f"Task created: {task_id}")
print(f"Status: {task['status']}")
// Create a SERP crawl task.
// Keeping the request payload in a named object makes it easy to tweak
// the keyword/location parameters for your own crawls.
const crawlParams = {
  keyword: "best project management software",
  depth: 10,
  device: "desktop",
  location_name: "United States",
  iso_code: "US",
  language_code: "en"
};

const task = await apiRequest("/api/v2/serp/crawl", {
  method: "POST",
  body: JSON.stringify(crawlParams)
});

const taskId = task.id;
console.log(`Task created: ${taskId}`);
console.log(`Status: ${task.status}`);
The response includes a task ID and initial status:
{
"id": 1166085028196491264,
"user_id": 1134894642391789569,
"status": "awaiting",
"keyword": "best project management software",
"location_name": "United States",
"iso_code": "US",
"device": "desktop",
"depth": 10,
"language_code": "en",
"frequency": 24,
"created_on": "2026-01-29",
"created_at": 1769712336,
"ttl": 1769798736
}
Retrieve the Results
Poll the task endpoint until the status changes to success or completed.
For production use, consider using webhooks instead of polling.
# Replace TASK_ID with your actual task ID
# Repeat this call until the "status" field becomes "success" (or "error").
curl -X GET "https://engine.v2.serpwatch.io/api/v2/serp/crawl/TASK_ID" \
-H "Authorization: Bearer $SERPWATCH_API_KEY"
import time

# Poll the task endpoint until the crawl reaches a terminal state, with a
# bounded timeout (30 attempts x 2s sleep ~= 60s ceiling).
# Per the docs above, a finished crawl may report "success" or "completed",
# so treat both as terminal.
max_attempts = 30
for attempt in range(max_attempts):
    result = session.get(f"{BASE_URL}/api/v2/serp/crawl/{task_id}").json()
    status = result["status"]
    if status in ("success", "completed"):
        print("Task completed!")
        # Organic listings live under result.left alongside ads and SERP
        # features; filter by type to count only organic hits.
        organic = [r for r in result.get("result", {}).get("left", []) if r.get("type") == "organic"]
        print(f"Found {len(organic)} organic results")
        break
    elif status == "error":
        print(f"Task failed: {result.get('error_message')}")
        break
    else:
        print(f"Status: {status} - waiting...")
        time.sleep(2)
else:
    # for/else: the loop ran out of attempts without hitting a break.
    print("Timeout waiting for results")
// Poll for results with async/await
const sleep = (ms) => new Promise(resolve => setTimeout(resolve, ms));

/**
 * Polls the crawl task until it reaches a terminal state.
 * Per the docs above, a finished crawl may report "success" or
 * "completed", so both are treated as terminal.
 *
 * @param {number|string} taskId - ID returned when the task was created.
 * @param {number} [maxAttempts=30] - Poll limit (2s apart, ~60s ceiling).
 * @returns {Promise<object>} The full task object including parsed SERP data.
 * @throws {Error} On task failure or when maxAttempts is exhausted.
 */
async function waitForResults(taskId, maxAttempts = 30) {
  for (let i = 0; i < maxAttempts; i++) {
    const result = await apiRequest(`/api/v2/serp/crawl/${taskId}`);
    if (result.status === "success" || result.status === "completed") {
      console.log("Task completed!");
      // Organic listings live under result.left alongside ads/features.
      const organic = (result.result?.left || []).filter(r => r.type === "organic");
      console.log(`Found ${organic.length} organic results`);
      return result;
    } else if (result.status === "error") {
      throw new Error(`Task failed: ${result.error_message}`);
    }
    console.log(`Status: ${result.status} - waiting...`);
    await sleep(2000);
  }
  throw new Error("Timeout waiting for results");
}
const results = await waitForResults(taskId);
When complete, the response includes the parsed SERP data:
{
"id": 1166085028196491264,
"user_id": 1134894642391789569,
"status": "success",
"keyword": "best project management software",
"location_name": "United States",
"iso_code": "US",
"device": "desktop",
"depth": 10,
"result": {
"left": [
{
"type": "organic",
"position": "1",
"url": "https://example.com/pm-software",
"title": "15 Best Project Management Software of 2026",
"snippet": "Compare the top project management tools..."
},
{
"type": "organic",
"position": "2",
"url": "https://reviews.com/pm-tools",
"title": "Top 10 Project Management Tools | Reviews",
"snippet": "In-depth reviews of popular PM software..."
}
],
"right": []
},
"top_10": ["example.com", "reviews.com"]
}
Next Steps
Now that you've made your first API call, explore these topics: