# robots.txt - Search engine crawler directives
# sys32.lt - Advanced IT Solutions

# Allow all search engines
User-agent: *
Allow: /

# Specific rules for Googlebot
User-agent: Googlebot
Allow: /
Crawl-delay: 0

# Disallow crawling of sensitive directories
Disallow: /admin/
Disallow: /private/
Disallow: /temp/
Disallow: /.env
Disallow: /.git/

# Allow crawling of CSS and JS files
Allow: /*.css
Allow: /*.js
Allow: /css/
Allow: /js/
Allow: /assets/

# Allow crawling of data files
Allow: /data/

# Disallow certain file types
Disallow: /*.pdf$
Disallow: /*.zip$

# Set sitemap location
Sitemap: https://sys32.lt/sitemap.xml
Sitemap: https://sys32.lt/sitemap-mobile.xml

# Crawl delay (in seconds) - adjust based on server capacity
# NOTE(review): Crawl-delay is nonstandard and ignored by Googlebot; decimal
# values are rejected by some crawlers - confirm intent.
Crawl-delay: 0.5

# Request rate (pages per second)
# NOTE(review): Request-rate is nonstandard and ignored by major search engines.
Request-rate: 10/1s