Table Of Contents
Problem
You need to set a custom User-Agent header to identify your application, bypass default blocking, or comply with API requirements that check User-Agent strings.
Solution
// Basic User-Agent with fetch()
/**
 * Fetch a URL with a fixed custom User-Agent header and parse the JSON body.
 * @param {string} url - The URL to request.
 * @returns {Promise<any>} The parsed JSON response body.
 * @throws {Error} On a non-2xx HTTP status. fetch() resolves (does not
 *   reject) on HTTP errors, so without this check response.json() would
 *   silently try to parse an error page.
 */
async function fetchWithUserAgent(url) {
  const response = await fetch(url, {
    headers: {
      'User-Agent': 'MyApp/1.0.0 (Node.js)',
    },
  });
  if (!response.ok) {
    throw new Error(`Request failed: ${response.status} ${response.statusText}`);
  }
  return await response.json();
}
// Browser-like User-Agent
/**
 * Fetch a URL while presenting a browser-like User-Agent string, then
 * parse the JSON body. Useful for servers that reject non-browser clients;
 * always respect the target site's robots.txt and terms of service.
 * @param {string} url - The URL to request.
 * @returns {Promise<any>} The parsed JSON response body.
 * @throws {Error} On a non-2xx HTTP status. fetch() resolves (does not
 *   reject) on HTTP errors, so the status must be checked explicitly.
 */
async function fetchWithBrowserUA(url) {
  const response = await fetch(url, {
    headers: {
      'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36',
    },
  });
  if (!response.ok) {
    throw new Error(`Request failed: ${response.status} ${response.statusText}`);
  }
  return await response.json();
}
// Dynamic User-Agent
/**
 * Build a User-Agent string in the "AppName/Version (Node.js vX.Y.Z)" format.
 * @param {string} appName - Application name to identify as.
 * @param {string} version - Application version string.
 * @returns {string} The assembled User-Agent value.
 */
function createUserAgent(appName, version) {
  const runtime = `Node.js ${process.version}`;
  const identity = `${appName}/${version}`;
  return `${identity} (${runtime})`;
}
/**
 * Fetch a URL using a User-Agent built at call time from an app name and
 * version, logging the header value, and parse the JSON body.
 * @param {string} url - The URL to request.
 * @param {string} [appName='MyBot'] - Application name for the User-Agent.
 * @param {string} [version='1.0'] - Application version for the User-Agent.
 * @returns {Promise<any>} The parsed JSON response body.
 * @throws {Error} On a non-2xx HTTP status. fetch() resolves (does not
 *   reject) on HTTP errors, so the status must be checked explicitly.
 */
async function fetchWithDynamicUA(url, appName = 'MyBot', version = '1.0') {
  const userAgent = createUserAgent(appName, version);
  const response = await fetch(url, {
    headers: {
      'User-Agent': userAgent,
    },
  });
  console.log('Using User-Agent:', userAgent);
  if (!response.ok) {
    throw new Error(`Request failed: ${response.status} ${response.statusText}`);
  }
  return await response.json();
}
// Axios with User-Agent
const axios = require('axios');
// Shared client instance: every request made through it carries this
// User-Agent header, so individual calls don't need to set it.
const client = axios.create({
headers: {
'User-Agent': 'MyAPI-Client/2.1.0'
}
});
/**
 * GET a URL via the preconfigured axios client and return the response body.
 * @param {string} url - The URL to request.
 * @returns {Promise<any>} The response body (axios parses JSON automatically).
 * Note: unlike fetch(), axios rejects its promise on non-2xx statuses, so
 * callers should catch errors — TODO confirm no interceptors change this.
 */
async function axiosWithUA(url) {
const response = await client.get(url);
return response.data;
}
Explanation
Set the User-Agent value in the headers
object when making requests. Use a descriptive format such as AppName/Version (Platform)
so servers can identify your application.
Some APIs require specific User-Agent strings or block requests without them. Browser-like User-Agents help avoid blocking, but always respect robots.txt and terms of service when scraping.
Share this article
Add Comment
No comments yet. Be the first to comment!