Capture screenshots of multiple websites in one script. Automate bulk screenshot workflows for monitoring, archiving, or reporting.
# Bash loop over multiple URLs
for url in "https://google.com" "https://github.com" "https://twitter.com"; do
  # Filesystem-safe slug from the URL, e.g. "google-com".
  slug=$(echo "$url" | sed 's/https:\/\///' | sed 's/[^a-zA-Z0-9]/-/g')
  # -G + --data-urlencode percent-encode the URL and key, so characters
  # like & or ? inside $url cannot corrupt the query string.
  # --fail stops curl from saving an HTML error page as the .png.
  curl --fail -G "https://snapapi.tech/v1/screenshot" \
    --data-urlencode "url=$url" \
    --data-urlencode "api_key=YOUR_KEY" \
    -o "$slug.png"
  sleep 1  # simple rate limiting between requests
done

import requests, time, re
urls = ["https://google.com", "https://github.com", "https://twitter.com"]
for url in urls:
slug = re.sub(r'[^a-zA-Z0-9]', '-', url.replace('https://', ''))
response = requests.get("https://snapapi.tech/v1/screenshot", params={
"url": url, "api_key": "YOUR_KEY"
})
with open(f"{slug}.png", "wb") as f:
f.write(response.content)
print(f"Captured {url}")
time.sleep(1)const urls = ["https://google.com", "https://github.com", "https://twitter.com"];
// Top-level `await` only works in an ES module, where `require` is undefined —
// load fs via dynamic import so the snippet actually runs under Node ESM.
const { writeFileSync } = await import("node:fs");

for (const url of urls) {
  // Filesystem-safe slug from the URL, e.g. "google-com".
  const slug = url.replace("https://", "").replace(/[^a-zA-Z0-9]/g, "-");
  // URLSearchParams percent-encodes values, so special characters in `url` are safe.
  const res = await fetch(
    "https://snapapi.tech/v1/screenshot?" + new URLSearchParams({
      url, api_key: "YOUR_KEY"
    })
  );
  // Fail loudly on HTTP errors instead of saving an error payload as a .png.
  if (!res.ok) {
    throw new Error(`Screenshot failed for ${url}: HTTP ${res.status}`);
  }
  writeFileSync(`${slug}.png`, Buffer.from(await res.arrayBuffer()));
  await new Promise((r) => setTimeout(r, 1000)); // simple rate limiting
}