Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions app/api/export/csv/route.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,8 @@ import { NextRequest, NextResponse } from "next/server";
import { cacheGet } from "@/lib/cache";
import { buildCsv } from "@/lib/csv";

// Next.js route segment config: allow this export handler up to 30s of
// execution before the platform (e.g. Vercel) terminates the invocation.
export const maxDuration = 30;

export async function GET(request: NextRequest) {
const runId = request.nextUrl.searchParams.get("runId");
if (!runId) {
Expand Down
2 changes: 2 additions & 0 deletions app/api/export/json/route.ts
Original file line number Diff line number Diff line change
@@ -1,6 +1,8 @@
import { NextRequest, NextResponse } from "next/server";
import { cacheGet } from "@/lib/cache";

// Next.js route segment config: allow this export handler up to 30s of
// execution before the platform (e.g. Vercel) terminates the invocation.
export const maxDuration = 30;

export async function GET(request: NextRequest) {
const runId = request.nextUrl.searchParams.get("runId");
if (!runId) {
Expand Down
2 changes: 2 additions & 0 deletions app/api/scrape/route.ts
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,8 @@ import { scrapeUsers, ScrapeOptions } from "@/lib/github";
import { cacheSet } from "@/lib/cache";
import { ScrapeRequest, ScrapeResponse } from "@/lib/types/user";

// Next.js route segment config: scraping is the long-running path, so give
// this handler up to 60s (vs 30s for the export routes) before timeout.
export const maxDuration = 60;

export async function POST(request: Request) {
let body: ScrapeRequest;
try {
Expand Down
8 changes: 7 additions & 1 deletion lib/github.ts
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,9 @@ function sleep(ms: number): Promise<void> {

// Base URL for all GitHub REST API requests made by this module.
const GITHUB_API = "https://api.github.com";

/** Per-request timeout for GitHub API calls (ms). */
const GITHUB_API_TIMEOUT_MS = 30_000;

function headers(): HeadersInit {
const h: Record<string, string> = {
Accept: "application/vnd.github+json",
Expand All @@ -41,7 +44,10 @@ async function ghFetch(
maxRetries = 3
): Promise<Response> {
for (let attempt = 0; attempt <= maxRetries; attempt++) {
const res = await fetch(url, { headers: headers() });
const res = await fetch(url, {
headers: headers(),
signal: AbortSignal.timeout(GITHUB_API_TIMEOUT_MS),
});

Comment on lines +47 to 51
Copy link

Copilot AI Feb 22, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

fetch() will throw on timeout (AbortError) or other network errors, which bypasses the current retry loop entirely. Since this PR introduces timeouts, consider wrapping the fetch() call in a try/catch and retrying (or at least converting AbortError into a clearer error) so a transient timeout doesn’t immediately fail the whole scrape.

Suggested change
const res = await fetch(url, {
headers: headers(),
signal: AbortSignal.timeout(GITHUB_API_TIMEOUT_MS),
});
let res: Response;
try {
res = await fetch(url, {
headers: headers(),
signal: AbortSignal.timeout(GITHUB_API_TIMEOUT_MS),
});
} catch (err) {
const isAbortError =
err instanceof Error && err.name === "AbortError";
if (attempt < maxRetries) {
// Simple backoff for transient network/timeout errors.
const backoffMs = 1000 * (attempt + 1);
await sleep(backoffMs);
continue;
}
const baseMessage = isAbortError
? `GitHub API request timed out after ${GITHUB_API_TIMEOUT_MS}ms`
: "GitHub API request failed";
const detail =
err instanceof Error && err.message
? `: ${err.message}`
: "";
throw new Error(`${baseMessage}${detail} (${url})`);
}

Copilot uses AI. Check for mistakes.
if (res.ok) return res;

Expand Down
3 changes: 3 additions & 0 deletions src/scraper.js
Original file line number Diff line number Diff line change
Expand Up @@ -52,6 +52,9 @@ const SEARCH_QUERIES = [
// Shared Octokit client for the scraper. `request.timeout` aborts requests
// that take longer than the given value (presumably milliseconds, matching
// GITHUB_API_TIMEOUT_MS elsewhere in this PR — confirm against Octokit docs)
// so a hung GitHub API call cannot stall the whole scrape.
const octokit = new Octokit({
auth: GITHUB_TOKEN,
userAgent: "gitfast-uganda-scraper/1.0.0",
request: {
timeout: 30_000,
},
});

// ---------------------------------------------------------------------------
Expand Down