# robots.txt for Bridgetown Home Buyers
# https://bridgetownbuys.com/robots.txt

# Sitemap location (global: applies regardless of user-agent group)
Sitemap: https://bridgetownbuys.com/sitemap.xml

# ---- Default group: all crawlers not matched by a named group below ----
User-agent: *
Allow: /
# LLM context files for AI assistants
# These files provide structured information for language models
Allow: /llms.txt
Allow: /llms-full.txt
# Disallow these paths from indexing
Disallow: /admin/
Disallow: /thank-you/
# Crawl-delay (optional - remove if causing issues)
# Crawl-delay: 1

# NOTE(review): per RFC 9309 a crawler obeys ONLY the single most specific
# matching User-agent group, so each named-bot group below must repeat the
# Disallow rules — otherwise those bots would ignore the /admin/ and
# /thank-you/ exclusions defined in the "*" group.

# Specific bot instructions
# Google
User-agent: Googlebot
Allow: /
Disallow: /admin/
Disallow: /thank-you/
Crawl-delay: 0

# Bing
User-agent: Bingbot
Allow: /
Disallow: /admin/
Disallow: /thank-you/
Crawl-delay: 0

# AI Crawlers and LLMs
# Allow AI assistants to access LLM context files
User-agent: GPTBot
Allow: /
Allow: /llms.txt
Allow: /llms-full.txt
Disallow: /admin/
Disallow: /thank-you/

User-agent: ChatGPT-User
Allow: /
Allow: /llms.txt
Allow: /llms-full.txt
Disallow: /admin/
Disallow: /thank-you/

User-agent: Google-Extended
Allow: /
Allow: /llms.txt
Allow: /llms-full.txt
Disallow: /admin/
Disallow: /thank-you/

User-agent: CCBot
Allow: /
Allow: /llms.txt
Allow: /llms-full.txt
Disallow: /admin/
Disallow: /thank-you/

User-agent: anthropic-ai
Allow: /
Allow: /llms.txt
Allow: /llms-full.txt
Disallow: /admin/
Disallow: /thank-you/

User-agent: Claude-Web
Allow: /
Allow: /llms.txt
Allow: /llms-full.txt
Disallow: /admin/
Disallow: /thank-you/

# Social Media Crawlers
User-agent: facebookexternalhit
Allow: /
Disallow: /admin/
Disallow: /thank-you/

User-agent: Twitterbot
Allow: /
Disallow: /admin/
Disallow: /thank-you/

User-agent: LinkedInBot
Allow: /
Disallow: /admin/
Disallow: /thank-you/

# Disallow bad bots (optional)
# User-agent: SemrushBot
# Disallow: /
# User-agent: AhrefsBot
# Disallow: /
# User-agent: MJ12bot
# Disallow: /

# Host directive (optional - specify preferred domain; nonstandard, ignored by most crawlers)
# Host: https://bridgetownbuys.com

# Notes for developers:
# - Update sitemap URL when domain changes
# - Keep LLM context files publicly accessible
# - Monitor crawl budget in Google Search Console
# - Adjust crawl-delay if server load is an issue
# - Review and update bot list as new AI crawlers emerge

# Last Updated: April 18, 2026
# Content: 89 pages including 6 neighborhood hubs (42 neighborhoods), 13 county pages, 50+ city pages, blog