# Bohuvuj Robots.txt
# Search Engine Crawler Directives

# Allow all bots by default
User-agent: *
Allow: /
Disallow: /__pycache__/
Disallow: /.git/
Disallow: /node_modules/
Disallow: /.env
Disallow: /.env.local
Disallow: /dist/

# Specific rules for major search engines
# NOTE: Crawl-delay and Request-rate are non-standard directives;
# Googlebot ignores both (crawl rate is managed via Search Console).
User-agent: Googlebot
Allow: /
Crawl-delay: 0
Request-rate: 100/hour

User-agent: Bingbot
Allow: /
Crawl-delay: 1
Request-rate: 50/hour

User-agent: Yandexbot
Allow: /
Crawl-delay: 0.5
Request-rate: 50/hour

User-agent: Slurp
Allow: /

User-agent: DuckDuckBot
Allow: /

# Disallow bad bots
User-agent: AhrefsBot
Disallow: /

User-agent: SemrushBot
Disallow: /

User-agent: DotBot
Disallow: /

# Sitemap location
Sitemap: https://bohuvuj.com/sitemap.xml