# Robots.txt for Utils+ (utilsplus.dev)
# Updated: 2025-07-05

# Default rules for all crawlers
User-agent: *
Allow: /

# Block access to development and internal files
Disallow: /src/
Disallow: /dist/
Disallow: /node_modules/
Disallow: /.git/
Disallow: /.vscode/
Disallow: /scripts/
Disallow: /tests/
Disallow: /build/
Disallow: /coverage/

# Block specific file types (wildcard rules; supported by Google, Bing, and other major crawlers)
Disallow: /*.log$
Disallow: /*.map$
Disallow: /package.json
Disallow: /package-lock.json
Disallow: /yarn.lock
Disallow: /tsconfig.json
Disallow: /tsconfig.app.json
Disallow: /tsconfig.node.json
Disallow: /vite.config.ts
Disallow: /tailwind.config.js
Disallow: /postcss.config.js
Disallow: /eslint.config.js
Disallow: /wrangler.toml

# Block environment and config files
Disallow: /.env
Disallow: /.env.local
Disallow: /.env.production
Disallow: /.gitignore
Disallow: /.nvmrc
Disallow: /.yarnrc.yml

# Explicitly allow important static assets
Allow: /favicon.svg
Allow: /logo.svg
Allow: /manifest.json
Allow: /_headers
Allow: /_redirects
Allow: /404.html
Allow: /sitemap.xml
Allow: /robots.txt

# Specific directives for Googlebot (no crawl-delay)
User-agent: Googlebot
Allow: /
Disallow: /src/
Disallow: /dist/
Disallow: /node_modules/
Disallow: /.git/
Disallow: /.vscode/
Disallow: /scripts/
Disallow: /tests/

# Specific directives for Bingbot
User-agent: Bingbot
Allow: /
Disallow: /src/
Disallow: /dist/
Disallow: /node_modules/
Disallow: /.git/

# Specific directives for Yahoo (Slurp)
User-agent: Slurp
Allow: /
Disallow: /src/
Disallow: /dist/
Disallow: /node_modules/

# Sitemap location
Sitemap: https://utilsplus.dev/sitemap.xml

# Note: All legacy query parameter URLs (/?tool=json, /?page=privacy)
# have been permanently removed and replaced with clean URLs