# public/robots.txt

# Allow all crawlers
User-agent: *
Allow: /

# Block authentication routes to prevent duplicate content
Disallow: /sign-in/
Disallow: /sign-up/
Disallow: /sign-out/
Disallow: /verify/
Disallow: /reset-password/

# Disallow API routes
Disallow: /api/

# Disallow test request pages (optional, remove if you want these indexed)
Disallow: /request-a-test
Disallow: /free-product-test

# Common files to disallow
Disallow: /*.json$
Disallow: /*.js$
Disallow: /*.css$

# Rate limiting
Crawl-delay: 1

# Sitemaps
Sitemap: https://www.finnrick.com/sitemap.xml

# Host
Host: https://www.finnrick.com
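
If this file belongs to a Next.js project (an assumption based on the public/ path; the framework is not stated in the source), the same rules can also be generated from an app/robots.ts metadata route instead of a static file. A minimal sketch under that assumption is below; Crawl-delay is omitted because Google ignores the directive and it may not be covered by the Robots type, and the route names are copied from the rules above.

```ts
// app/robots.ts — a sketch assuming a Next.js App Router project.
// Next.js serves the returned object as /robots.txt; if this route is used,
// the static public/robots.txt should be removed so the two do not conflict.
import type { MetadataRoute } from "next";

export default function robots(): MetadataRoute.Robots {
  return {
    rules: [
      {
        userAgent: "*",
        allow: "/",
        disallow: [
          // Authentication routes (duplicate-content risk)
          "/sign-in/",
          "/sign-up/",
          "/sign-out/",
          "/verify/",
          "/reset-password/",
          // API routes
          "/api/",
          // Test request pages (optional)
          "/request-a-test",
          "/free-product-test",
          // File-type patterns
          "/*.json$",
          "/*.js$",
          "/*.css$",
        ],
      },
    ],
    sitemap: "https://www.finnrick.com/sitemap.xml",
    host: "https://www.finnrick.com",
  };
}
```

If the static public/robots.txt is kept instead, nothing further is needed; the sketch is only the framework-native equivalent of the same rules.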