If you're hosting your React website statically, for example on AWS S3, then you probably don't want full Server-Side Rendering.
An alternative is a Lambda function that returns pre-rendered HTML to bots and the normal dynamic experience to users:
const fs = require('fs');
const path = require('path');

// Basic user-agent sniffing for the common search/social crawlers.
const isBot = (userAgent) => {
  return /bot|googlebot|crawler|spider|robot|crawling/i.test(userAgent);
};

exports.handler = async (event) => {
  // Header casing differs between integrations, and the header may be missing entirely,
  // so check both forms and fall back to an empty string.
  const userAgent = (event.headers && (event.headers['user-agent'] || event.headers['User-Agent'])) || '';

  if (isBot(userAgent)) {
    // Crawlers get the pre-rendered HTML snapshot for the requested path.
    const preRenderedPath = path.join(__dirname, 'pre-rendered', event.path, 'index.html');
    if (fs.existsSync(preRenderedPath)) {
      const preRenderedHtml = fs.readFileSync(preRenderedPath, 'utf8');
      return {
        statusCode: 200,
        headers: {
          'Content-Type': 'text/html',
        },
        body: preRenderedHtml,
      };
    }
    return {
      statusCode: 404,
      body: 'Page not found',
    };
  }

  // Everyone else gets the normal client-side React app shell.
  const indexHtml = fs.readFileSync(path.join(__dirname, 'build', 'index.html'), 'utf8');
  return {
    statusCode: 200,
    headers: {
      'Content-Type': 'text/html',
    },
    body: indexHtml,
  };
};
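How you generate the pre-rendered folder is up to you. One approach is a build-time script that crawls your routes with Puppeteer and snapshots the rendered HTML; here is a minimal sketch, assuming you serve the build output locally on port 3000 and maintain the route list by hand (both are assumptions, not part of the Lambda above):

// prerender.js - run after "npm run build" against a locally served build.
const fs = require('fs');
const path = require('path');
const puppeteer = require('puppeteer');

// Assumption: whichever routes crawlers should be able to see.
const routes = ['/', '/shopping/coffee'];

(async () => {
  const browser = await puppeteer.launch();
  const page = await browser.newPage();
  for (const route of routes) {
    // Wait until the app has finished its network activity before snapshotting.
    await page.goto(`http://localhost:3000${route}`, { waitUntil: 'networkidle0' });
    const html = await page.content();
    const outDir = path.join(__dirname, 'pre-rendered', route);
    fs.mkdirSync(outDir, { recursive: true });
    fs.writeFileSync(path.join(outDir, 'index.html'), html);
  }
  await browser.close();
})();

The output directory layout mirrors what the Lambda above looks up: pre-rendered/<path>/index.html.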
Beware that every request will go through the Lambda.
*Also note that you can put static HTML files in the "public" folder of a React website; these can contain the og:image and other meta tags and then redirect to a dynamic page, e.g. /shopping/coffee/morocco redirects to /shopping/coffee/#morocco. Unfortunately, each time you post a link to FB, the crawler will check/update its cache, so you do need something static at the endpoint (you can't upload the HTML files, force a batch FB link/cache update, and then delete the HTML files).
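As a rough sketch, one of those static files might look something like the following (the image URL, titles, and paths are placeholders): it gives the crawler its Open Graph tags and then sends real visitors on to the dynamic route.

<!-- public/shopping/coffee/morocco/index.html (placeholder paths and content) -->
<!DOCTYPE html>
<html>
  <head>
    <meta charset="utf-8" />
    <meta property="og:title" content="Moroccan Coffee" />
    <meta property="og:description" content="Shop our Moroccan coffee selection." />
    <meta property="og:image" content="https://example.com/images/morocco-coffee.jpg" />
    <!-- Send real visitors on to the dynamic page -->
    <meta http-equiv="refresh" content="0; url=/shopping/coffee/#morocco" />
  </head>
  <body>
    <p>Redirecting to <a href="/shopping/coffee/#morocco">our coffee shop</a>...</p>
  </body>
</html>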