Most developers know the basics of the Fetch API: call fetch(url), await the response, call .json(). But the Fetch API is far more capable than that. In production apps, you need cancellation, timeouts, retries, interceptors, and streaming. This post covers all of it.
Request Cancellation with AbortController
One of the most overlooked features of Fetch is the ability to cancel in-flight requests using AbortController. This is critical for search-as-you-type, navigation, and unmounting components.
// A single shared AbortController; any holder of it can cancel the request.
const controller = new AbortController();
const { signal } = controller;

/**
 * Fetch JSON from `url`, resolving to null when the request is aborted.
 * Any non-abort error is rethrown to the caller.
 */
async function fetchWithCancel(url) {
  try {
    const response = await fetch(url, { signal });
    return await response.json();
  } catch (err) {
    // Aborts are an expected outcome, not a failure.
    if (err.name !== "AbortError") throw err;
    console.log("Request was cancelled");
    return null;
  }
}

// Cancel it at any time
controller.abort();
Auto-Cancel Debounced Search
// Controller for the most recent search request, if one is in flight.
let controller = null;

/**
 * Search-as-you-type handler: aborts any in-flight request before issuing
 * a new one, so only the latest query's results ever reach the UI.
 */
async function search(query) {
  // Cancel the previous request if still running
  controller?.abort();
  controller = new AbortController();
  const url = `/api/search?q=${encodeURIComponent(query)}`;
  try {
    const response = await fetch(url, { signal: controller.signal });
    renderResults(await response.json());
  } catch (err) {
    // An abort just means a newer search superseded this one — stay quiet.
    if (err.name !== "AbortError") {
      console.error("Search failed:", err);
    }
  }
}
Request Timeouts
Fetch has no built-in timeout option. Combine AbortController with setTimeout (or, in modern runtimes, use the one-liner AbortSignal.timeout(ms)):
/**
 * fetch() with a timeout enforced via AbortController.
 *
 * @param {string} url - Request URL.
 * @param {RequestInit} [options] - Standard fetch options; any `signal`
 *   supplied here is overridden by the internal timeout controller.
 * @param {number} [timeoutMs=5000] - Milliseconds before the request aborts.
 * @returns {Promise<Response>} Raw Response (status is NOT checked here).
 * @throws {Error} "Request timed out after Nms" when the timer fires;
 *   other fetch errors propagate unchanged.
 */
async function fetchWithTimeout(url, options = {}, timeoutMs = 5000) {
  const controller = new AbortController();
  const id = setTimeout(() => controller.abort(), timeoutMs);
  try {
    return await fetch(url, {
      ...options,
      signal: controller.signal,
    });
  } catch (err) {
    if (err.name === "AbortError") {
      throw new Error(`Request timed out after ${timeoutMs}ms`);
    }
    throw err;
  } finally {
    // Single cleanup point instead of duplicating clearTimeout on both the
    // success and error paths — guaranteed to run on every exit.
    clearTimeout(id);
  }
}
// Usage: fail fast if /api/data takes longer than 3 seconds
const response = await fetchWithTimeout("/api/data", {}, 3000);
Retry Logic with Exponential Backoff
Many network failures are transient. A robust client retries automatically with exponential backoff to avoid hammering a struggling server.
/**
 * fetch() with automatic retries and exponential backoff.
 * Server errors (5xx) and network failures are retried; client errors (4xx)
 * and the final attempt's response are returned/thrown as-is. AbortError is
 * never retried.
 *
 * @param {string} url - Request URL.
 * @param {RequestInit} [options] - Standard fetch options.
 * @param {number} [retries=3] - Maximum number of retries after the first try.
 * @param {number} [backoff=300] - Base delay in ms, doubled per attempt.
 * @returns {Promise<Response>} The last Response received.
 */
async function fetchWithRetry(url, options = {}, retries = 3, backoff = 300) {
  const sleep = (ms) => new Promise((resolve) => setTimeout(resolve, ms));
  for (let attempt = 0; attempt <= retries; attempt++) {
    const wait = backoff * 2 ** attempt;
    try {
      const response = await fetch(url, options);
      const shouldRetry =
        !response.ok && response.status >= 500 && attempt < retries;
      if (!shouldRetry) return response;
      console.warn(
        `Attempt ${attempt + 1} failed (${response.status}). Retrying in ${wait}ms...`,
      );
      await sleep(wait);
    } catch (err) {
      // Network-level failure: retry unless aborted or out of attempts.
      if (err.name === "AbortError" || attempt === retries) throw err;
      console.warn(
        `Network error on attempt ${attempt + 1}. Retrying in ${wait}ms...`,
      );
      await sleep(wait);
    }
  }
}
// Usage: up to 3 retries, starting from a 500ms backoff (500, 1000, 2000ms)
const response = await fetchWithRetry("/api/data", {}, 3, 500);
const data = await response.json();
Request and Response Interceptors
Interceptors let you globally transform requests (add auth headers) and responses (handle 401s) without repeating that logic everywhere.
/**
 * Minimal fetch wrapper with chainable request/response interceptors.
 * Request interceptors receive and return a config object
 * ({ url, ...fetchOptions }); response interceptors receive and return a
 * Response. Both run in registration order and may be async.
 */
class FetchClient {
  constructor(baseUrl = "") {
    this.baseUrl = baseUrl;
    this.requestInterceptors = [];
    this.responseInterceptors = [];
  }

  /** Register a request interceptor; returns `this` for chaining. */
  addRequestInterceptor(fn) {
    this.requestInterceptors.push(fn);
    return this;
  }

  /** Register a response interceptor; returns `this` for chaining. */
  addResponseInterceptor(fn) {
    this.responseInterceptors.push(fn);
    return this;
  }

  /** Core pipeline: request interceptors → fetch → response interceptors. */
  async request(url, options = {}) {
    let config = { url: this.baseUrl + url, ...options };
    for (const apply of this.requestInterceptors) config = await apply(config);
    let response = await fetch(config.url, config);
    for (const apply of this.responseInterceptors) {
      response = await apply(response);
    }
    return response;
  }

  /** Convenience GET. */
  get(url, options = {}) {
    return this.request(url, { ...options, method: "GET" });
  }

  /** Convenience POST with a JSON-serialized body. */
  post(url, body, options = {}) {
    const headers = { "Content-Type": "application/json", ...options.headers };
    return this.request(url, {
      ...options,
      method: "POST",
      headers,
      body: JSON.stringify(body),
    });
  }
}
// Create a client with auth and 401 handling
const apiClient = new FetchClient("https://api.example.com");
apiClient
.addRequestInterceptor(async (config) => ({
...config,
headers: {
...config.headers,
Authorization: `Bearer ${localStorage.getItem("token")}`,
},
}))
.addResponseInterceptor(async (response) => {
if (response.status === 401) {
// Refresh token and retry, or redirect to login
await refreshToken();
}
return response;
});
// Usage
const res = await apiClient.get("/users/me");
const user = await res.json();
Streaming Responses
For large payloads or AI-generated text, you can process data chunk by chunk using response.body as a ReadableStream. This enables streaming UIs where content appears progressively.
/**
 * Stream a response body chunk by chunk, rendering the accumulated text
 * into #output as it arrives.
 *
 * @param {string} url - URL to fetch.
 * @returns {Promise<string>} The full decoded text once the stream ends.
 * @throws {Error} On a non-2xx status or when response.body is unavailable.
 */
async function streamResponse(url) {
  const response = await fetch(url);
  if (!response.ok) throw new Error(`HTTP ${response.status}`);
  if (!response.body) throw new Error("ReadableStream not supported");
  const reader = response.body.getReader();
  const decoder = new TextDecoder("utf-8");
  let result = "";
  while (true) {
    const { done, value } = await reader.read();
    if (done) break;
    // { stream: true } buffers a multi-byte UTF-8 sequence split across
    // chunk boundaries instead of emitting U+FFFD mid-stream.
    const chunk = decoder.decode(value, { stream: true });
    result += chunk;
    // Update UI progressively
    document.getElementById("output").textContent = result;
  }
  // Bug fix: flush the decoder. Without this final call, bytes still
  // buffered from an incomplete trailing sequence are silently dropped.
  result += decoder.decode();
  return result;
}
Streaming Server-Sent Events (SSE)
/**
 * Consume a Server-Sent-Events stream, invoking `onMessage` with each
 * JSON-parsed `data:` payload. The "[DONE]" sentinel is skipped.
 *
 * @param {string} url - SSE endpoint.
 * @param {(msg: any) => void} onMessage - Called once per decoded event.
 * @throws {Error} On a non-2xx status (bug fix: the original never checked,
 *   so an HTML error page would be fed to the SSE parser).
 */
async function streamSSE(url, onMessage) {
  const response = await fetch(url, {
    headers: { Accept: "text/event-stream" },
  });
  if (!response.ok) throw new Error(`HTTP ${response.status}`);
  const reader = response.body.getReader();
  const decoder = new TextDecoder();
  let buffer = "";
  while (true) {
    const { done, value } = await reader.read();
    if (done) break;
    buffer += decoder.decode(value, { stream: true });
    const lines = buffer.split("\n");
    buffer = lines.pop() ?? ""; // Keep incomplete line in buffer
    for (const rawLine of lines) {
      // Bug fix: servers may terminate lines with \r\n; a trailing \r
      // would otherwise corrupt the JSON payload.
      const line = rawLine.endsWith("\r") ? rawLine.slice(0, -1) : rawLine;
      if (!line.startsWith("data: ")) continue;
      const data = line.slice(6);
      if (data !== "[DONE]") {
        onMessage(JSON.parse(data));
      }
    }
  }
}
// Usage: streaming AI responses.
// NOTE(review): assumes an appendText() UI helper defined elsewhere, and
// that each SSE payload has a `content` field — confirm against the server.
streamSSE("/api/chat/stream", (chunk) => {
  appendText(chunk.content);
});
Uploading Files with Progress
fetch doesn’t expose upload progress natively, but you can use XMLHttpRequest wrapped in a Promise for that:
/**
 * Upload `file` as a multipart POST, reporting percentage progress.
 * Uses XMLHttpRequest because fetch does not expose upload progress.
 *
 * @param {string} url - Upload endpoint.
 * @param {Blob|File} file - Sent as the "file" form field.
 * @param {(percent: number) => void} onProgress - 0-100 progress callback.
 * @returns {Promise<any>} Parsed JSON response body.
 */
function uploadWithProgress(url, file, onProgress) {
  return new Promise((resolve, reject) => {
    const xhr = new XMLHttpRequest();
    xhr.upload.addEventListener("progress", (event) => {
      if (event.lengthComputable) {
        const percent = Math.round((event.loaded / event.total) * 100);
        onProgress(percent);
      }
    });
    xhr.addEventListener("load", () => {
      if (xhr.status >= 200 && xhr.status < 300) {
        // Bug fix: a malformed JSON body used to throw inside this handler,
        // leaving the promise pending forever. Reject explicitly instead.
        try {
          resolve(JSON.parse(xhr.responseText));
        } catch (err) {
          reject(new Error(`Invalid JSON in upload response: ${err.message}`));
        }
      } else {
        reject(new Error(`Upload failed: ${xhr.status}`));
      }
    });
    xhr.addEventListener("error", () => reject(new Error("Network error")));
    // Bug fix: without these listeners the promise would hang on
    // abort/timeout instead of settling.
    xhr.addEventListener("abort", () => reject(new Error("Upload aborted")));
    xhr.addEventListener("timeout", () => reject(new Error("Upload timed out")));
    const formData = new FormData();
    formData.append("file", file);
    xhr.open("POST", url);
    xhr.send(formData);
  });
}
// Usage: wire the uploader to a file input.
// NOTE(review): no guard for an empty selection — input.files[0] may be
// undefined if the user cancels the picker; the async handler's rejection
// is also unhandled. Fine for a demo, harden for production.
const input = document.getElementById("file-input");
input.addEventListener("change", async () => {
  const file = input.files[0];
  const result = await uploadWithProgress("/api/upload", file, (percent) => {
    console.log(`Upload: ${percent}%`);
  });
  console.log("Uploaded:", result);
});
A Production-Ready Fetch Wrapper
Combining everything above into one reusable utility:
/**
 * Production fetch wrapper: auth header, JSON body, timeout, and retries
 * with exponential backoff. Client errors (4xx) throw immediately; server
 * errors (5xx) and network failures are retried.
 *
 * Bug fixes vs the naive version:
 *  - 4xx errors are no longer retried. The original `throw error` for
 *    client errors was caught by the function's own catch block and then
 *    treated as a retryable failure.
 *  - The timeout now covers every attempt and is cleared exactly once,
 *    instead of being cleared after the first attempt.
 *
 * @throws {Error} For HTTP errors, with `.status` and `.data` attached;
 *   "Request timed out or cancelled" on abort.
 */
async function apiFetch(
  url,
  {
    method = "GET",
    body,
    headers = {},
    retries = 2,
    timeoutMs = 10000,
    signal,
  } = {},
) {
  const controller = new AbortController();
  const timeoutId = setTimeout(() => controller.abort(), timeoutMs);
  // Merge external signal if provided
  if (signal) {
    signal.addEventListener("abort", () => controller.abort(), { once: true });
  }
  const token = getAuthToken(); // Your auth token function
  const config = {
    method,
    signal: controller.signal,
    headers: {
      "Content-Type": "application/json",
      ...(token ? { Authorization: `Bearer ${token}` } : {}),
      ...headers,
    },
    ...(body ? { body: JSON.stringify(body) } : {}),
  };
  try {
    for (let attempt = 0; attempt <= retries; attempt++) {
      try {
        const response = await fetch(url, config);
        if (response.ok) return response;
        // Parse error body if possible
        const errorData = await response.json().catch(() => ({}));
        const error = new Error(errorData.message ?? `HTTP ${response.status}`);
        error.status = response.status;
        error.data = errorData;
        throw error;
      } catch (err) {
        if (err.name === "AbortError") {
          throw new Error("Request timed out or cancelled");
        }
        // Client errors (4xx) are the caller's fault — never retry them.
        const isClientError =
          typeof err.status === "number" && err.status < 500;
        if (isClientError || attempt === retries) throw err;
        // Exponential backoff before the next attempt
        await new Promise((r) => setTimeout(r, 300 * 2 ** attempt));
      }
    }
  } finally {
    // Single cleanup point — runs whether we return, throw, or abort.
    clearTimeout(timeoutId);
  }
}
// Usage: POST JSON and read back the created resource
const response = await apiFetch("/api/users", {
  method: "POST",
  body: { name: "Alice" },
});
const user = await response.json();
Key Takeaways
| Feature | How |
|---|---|
| Cancel requests | AbortController + signal |
| Timeouts | AbortController + setTimeout |
| Retries | Loop with exponential backoff |
| Auth headers | Request interceptors |
| Handle 401 | Response interceptors |
| Stream large data | response.body.getReader() |
| Upload progress | XMLHttpRequest wrapped in Promise |
The native Fetch API is powerful enough for almost any use case. Rather than reaching for axios or other HTTP libraries, build a thin wrapper that handles your specific needs — you’ll end up with less dependency surface area and full control over behavior.
💬 Have questions or want to discuss further? Join the King Technologies Discord — where code meets community!