Here is what a real-life robots.txt looks like (this one is from a WordPress site running WooCommerce):
User-agent: *
Disallow: /wp-content/uploads/wc-logs/
Disallow: /wp-content/uploads/woocommerce_transient_files/
Disallow: /wp-content/uploads/woocommerce_uploads/
Disallow: /wp-admin/
Allow: /wp-admin/admin-ajax.php
Sitemap: https://www.mywebsite.com/sitemap_index.xml
Next.js can generate this file for you instead: add a robots.js (or robots.ts) file to the root of the app directory, and whatever it returns is served at /robots.txt.

// app/robots.js
export default function robots() {
  return {
    rules: {
      userAgent: '*',
      allow: '/',
      disallow: '/private/',
    },
    sitemap: 'https://acme.com/sitemap.xml',
  }
}
Output:
User-Agent: *
Allow: /
Disallow: /private/
Sitemap: https://acme.com/sitemap.xml
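If you need different directives per crawler, the rules field also accepts an array of rule objects. Here is a minimal sketch of that shape; the bot names and paths are illustrative choices, not part of the example above:

// app/robots.js
export default function robots() {
  return {
    rules: [
      {
        // Let Googlebot crawl everything except /private/
        userAgent: 'Googlebot',
        allow: '/',
        disallow: '/private/',
      },
      {
        // Block these bots from the whole site (illustrative)
        userAgent: ['Applebot', 'Bingbot'],
        disallow: '/',
      },
    ],
    sitemap: 'https://acme.com/sitemap.xml',
  }
}

Each entry in the array becomes its own User-Agent block in the generated robots.txt.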