Sitemap: https://www.stl.news/sitemap_index.xml
Sitemap: https://www.stl.news/news-sitemap.xml
Sitemap: https://www.stl.news/video-sitemap.xml

User-agent: *
Allow: /wp-admin/admin-ajax.php
Allow: /*/*.css
Allow: /*/*.js
Disallow: /wp-admin/
Disallow: /wp-includes/
Disallow: /readme.html
Disallow: /license.txt
Disallow: /xmlrpc.php
Disallow: /wp-login.php
Disallow: /wp-register.php
Disallow: */disclaimer/*
Disallow: *?attachment_id=
Disallow: /privacy-policy

User-agent: Googlebot
Allow: /
User-agent: Googlebot-Image
Allow: /wp-content/uploads/
User-agent: Mediapartners-Google
Allow: /
User-agent: AdsBot-Google
Allow: /
User-agent: AdsBot-Google-Mobile
Allow: /
User-agent: Bingbot
Allow: /
User-agent: Msnbot
Allow: /
User-agent: msnbot-media
Allow: /wp-content/uploads/
User-agent: Applebot
Allow: /
User-agent: Yandex
Allow: /
User-agent: YandexImages
Allow: /wp-content/uploads/
User-agent: Slurp
Allow: /
User-agent: DuckDuckBot
Allow: /
User-agent: Qwantify
Allow: /

# Popular Chinese search engines
User-agent: Baiduspider
Allow: /
User-agent: Baiduspider/2.0
Allow: /
User-agent: Baiduspider-video
Allow: /
User-agent: Baiduspider-image
Allow: /
User-agent: Sogou spider
Allow: /
User-agent: Sogou web spider
Allow: /
User-agent: Sosospider
Allow: /
User-agent: Sosospider+
Allow: /
User-agent: Sosospider/2.0
Allow: /
User-agent: yodao
Allow: /
User-agent: youdao
Allow: /
User-agent: YoudaoBot
Allow: /
User-agent: YoudaoBot/1.0
Allow: /

# Block Bad Bots. "AI recommended setting" by ChatGPT
User-agent: ia_archiver
Disallow: /
User-agent: archive.org_bot
Disallow: /
User-agent: SiteExplorer
Disallow: /
User-agent: spbot
Disallow: /
User-agent: WBSearchBot
Disallow: /
User-agent: linkdexbot
Disallow: /
User-agent: Screaming Frog SEO Spider
Disallow: /
User-agent: netEstate NE Crawler
Disallow: /
User-agent: Moreover
Disallow: /
User-agent: sentibot
Disallow: /
User-agent: Aboundexbot
Disallow: /
User-agent: proximic
Disallow: /
User-agent: oBot
Disallow: /
User-agent: meanpathbot
Disallow: /
User-agent: Nutch
Disallow: /
User-agent: TurnitinBot
Disallow: /
User-agent: ZoominfoBot
Disallow: /
User-agent: ZmEu
Disallow: /
User-agent: grapeshot
Disallow: /
User-agent: python-requests
Disallow: /
User-agent: Go-http-client
Disallow: /
User-agent: Apache-HttpClient
Disallow: /
User-agent: libwww-perl
Disallow: /
User-agent: curl
Disallow: /
User-agent: wget
Disallow: /

# ChatGPT Bot Blocker - Block ChatGPT's GPTBot from scraping your content
User-agent: GPTBot
Disallow: /

# Spam Backlink Blocker
# Note: a "User-agent: *" line is added here so these rules apply to all
# crawlers; without it they would attach to the preceding GPTBot group.
User-agent: *
Disallow: /feed/
Disallow: /feed/$
Disallow: /comments/feed
Disallow: /trackback/
Disallow: */?author=*
Disallow: */author/*
Disallow: /author*
Disallow: /author/
Disallow: */comments$
Disallow: */feed
Disallow: */feed$
Disallow: */trackback
Disallow: */trackback$
Disallow: /?feed=
Disallow: /wp-comments
Disallow: /wp-feed
Disallow: /wp-trackback
Disallow: */replytocom=
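# A worked example of how the wildcard rules above match (the example post URL
# is hypothetical): '*' matches any sequence of characters and '$' anchors the
# pattern to the end of the URL, per Google's robots.txt documentation; not
# every crawler honors these extensions.
#   Disallow: */trackback$   blocks any URL ending in "/trackback",
#                            e.g. https://www.stl.news/example-post/trackback
#   Disallow: /?feed=        blocks https://www.stl.news/?feed=rss2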
# Block Bad Bots. Powered by Better Robots.txt Pro
User-agent: GiftGhostBot
Disallow: /
User-agent: Seznam
Disallow: /
User-agent: PaperLiBot
Disallow: /
User-agent: Genieo
Disallow: /
User-agent: Dataprovider/6.101
Disallow: /
User-agent: DataproviderSiteExplorer
Disallow: /
User-agent: Dazoobot/1.0
Disallow: /
User-agent: Diffbot
Disallow: /
User-agent: DomainStatsBot/1.0
Disallow: /
User-agent: dubaiindex
Disallow: /
User-agent: eCommerceBot
Disallow: /
User-agent: ExpertSearchSpider
Disallow: /
User-agent: Feedbin
Disallow: /
User-agent: Fetch/2.0a
Disallow: /
User-agent: FFbot/1.0
Disallow: /
User-agent: focusbot/1.1
Disallow: /
User-agent: HuaweiSymantecSpider
Disallow: /
User-agent: HuaweiSymantecSpider/1.0
Disallow: /
User-agent: JobdiggerSpider
Disallow: /
User-agent: LemurWebCrawler
Disallow: /
User-agent: LipperheyLinkExplorer
Disallow: /
User-agent: LSSRocketCrawler/1.0
Disallow: /
User-agent: LYT.SRv1.5
Disallow: /
User-agent: MiaDev/0.0.1
Disallow: /
User-agent: Najdi.si/3.1
Disallow: /
User-agent: BountiiBot
Disallow: /
User-agent: Experibot_v1
Disallow: /
User-agent: bixocrawler
Disallow: /
User-agent: bixocrawler TestCrawler
Disallow: /
User-agent: Crawler4j
Disallow: /
User-agent: Crowsnest/0.5
Disallow: /
User-agent: CukBot
Disallow: /
User-agent: Dataprovider/6.92
Disallow: /
User-agent: DBLBot/1.0
Disallow: /
User-agent: Diffbot/0.1
Disallow: /
User-agent: Digg Deeper/v1
Disallow: /
User-agent: discobot/1.0
Disallow: /
User-agent: discobot/1.1
Disallow: /
User-agent: discobot/2.0
Disallow: /
User-agent: discoverybot/2.0
Disallow: /
User-agent: Dlvr.it/1.0
Disallow: /
User-agent: drupact/0.7
Disallow: /
User-agent: Ezooms/1.0
Disallow: /
User-agent: fastbot crawler beta 2.0
Disallow: /
User-agent: fastbot crawler beta 4.0
Disallow: /
User-agent: feedly social
Disallow: /
User-agent: Feedly/1.0
Disallow: /
User-agent: FeedlyBot/1.0
Disallow: /
User-agent: Feedspot
Disallow: /
User-agent: Feedspotbot/1.0
Disallow: /
User-agent: Clickagy Intelligence Bot v2
Disallow: /
User-agent: classbot
Disallow: /
User-agent: CISPA Vulnerability Notification
Disallow: /
User-agent: CirrusExplorer/1.1
Disallow: /
User-agent: Checksem/Nutch-1.10
Disallow: /
User-agent: CatchBot/5.0
Disallow: /
User-agent: CatchBot/3.0
Disallow: /
User-agent: CatchBot/2.0
Disallow: /
User-agent: CatchBot/1.0
Disallow: /
User-agent: CamontSpider/1.0
Disallow: /
User-agent: Buzzbot/1.0
Disallow: /
User-agent: Buzzbot
Disallow: /
User-agent: BusinessSeek.biz_Spider
Disallow: /
User-agent: BUbiNG
Disallow: /
User-agent: 008/0.85
Disallow: /
User-agent: 008/0.83
Disallow: /
User-agent: 008/0.71
Disallow: /
User-agent: ^Nail
Disallow: /
User-agent: FyberSpider/1.3
Disallow: /
User-agent: findlinks/1.0
Disallow: /
User-agent: findlinks/1.1.3-beta8
Disallow: /
User-agent: findlinks/1.1.3-beta9
Disallow: /
User-agent: findlinks/1.1.4-beta7
Disallow: /
User-agent: findlinks/1.1.6-beta1
Disallow: /
User-agent: findlinks/1.1.6-beta1 Yacy
Disallow: /
User-agent: findlinks/1.1.6-beta2
Disallow: /
User-agent: findlinks/1.1.6-beta3
Disallow: /
User-agent: findlinks/1.1.6-beta4
Disallow: /
User-agent: findlinks/1.1.6-beta5
Disallow: /
User-agent: findlinks/1.1.6-beta6
Disallow: /
User-agent: findlinks/2.0
Disallow: /
User-agent: findlinks/2.0.1
Disallow: /
User-agent: findlinks/2.0.2
Disallow: /
User-agent: findlinks/2.0.4
Disallow: /
User-agent: findlinks/2.0.5
Disallow: /
User-agent: findlinks/2.0.9
Disallow: /
User-agent: findlinks/2.1
Disallow: /
User-agent: findlinks/2.1.3
Disallow: /
User-agent: findlinks/2.1.5
Disallow: /
User-agent: findlinks/2.2
Disallow: /
User-agent: findlinks/2.5
Disallow: /
User-agent: findlinks/2.6
Disallow: /
User-agent: g2reader-bot/1.0
Disallow: /
User-agent: bixo
Disallow: /
User-agent: bixolabs/1.0
Disallow: /
User-agent: Crawlera/1.10.2
Disallow: /
User-agent: Dataprovider Site Explorer
Disallow: /
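# Note on matching: RFC 9309 has crawlers select a group by product token
# (e.g. "findlinks") while ignoring version suffixes, so the version-specific
# entries above are defensive duplicates retained from the original list; they
# only take effect for bots that compare their full user-agent string.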
# Backlink Protector. Powered by Better Robots.txt Pro
User-agent: AhrefsBot
Disallow: /
User-agent: Alexibot
Disallow: /
User-agent: MJ12bot
Disallow: /
User-agent: SurveyBot
Disallow: /
User-agent: Xenu's
Disallow: /
User-agent: Xenu's Link Sleuth 1.1c
Disallow: /
User-agent: rogerbot
Disallow: /
User-agent: SemrushBot
Disallow: /
User-agent: SemrushBot-SA
Disallow: /
User-agent: SemrushBot-BA
Disallow: /
User-agent: SemrushBot-SI
Disallow: /
User-agent: SemrushBot-SWA
Disallow: /
User-agent: SemrushBot-CT
Disallow: /
User-agent: SemrushBot-BM
Disallow: /
User-agent: DotBot/1.1
Disallow: /
User-agent: DotBot
Disallow: /

# Image Crawlability by search engines
User-agent: *
Allow: /*.png*
Allow: /*.jpg*
Allow: /*.gif*
Allow: /*.webp*

# Avoid crawler traps causing crawl budget issues
Disallow: /search/
Disallow: *?s=*
Disallow: *?p=*
Disallow: *&p=*
Disallow: *&preview=*
Disallow: /search

# Social Media Crawling
User-agent: facebookexternalhit/1.0
Allow: /
User-agent: facebookexternalhit/1.1
Allow: /
User-agent: facebookplatform/1.0
Allow: /
User-agent: Facebot/1.0
Allow: /
User-agent: Visionutils/0.2
Allow: /
User-agent: datagnionbot
Allow: /
User-agent: Twitterbot
Allow: /
User-agent: LinkedInBot/1.0
Allow: /
User-agent: Pinterest/0.1
Allow: /
User-agent: Pinterest/0.2
Allow: /

# Allow/Disallow Ads.txt
User-agent: *
Disallow: /ads.txt

# Allow/Disallow App-ads.txt
User-agent: *
Disallow: /app-ads.txt
Crawl-delay: 10

# This robots.txt file was created by the Better Robots.txt (Index & Rank Booster by Pagup) plugin. https://www.better-robots.com/
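# Verification sketch (assuming this file is served at
# https://www.stl.news/robots.txt): Python's standard library can sanity-check
# the per-agent groups, though urllib.robotparser does not implement the '*'
# and '$' wildcard extensions, so wildcard rules may evaluate differently than
# in Google Search Console.
#
#   from urllib.robotparser import RobotFileParser
#   rp = RobotFileParser("https://www.stl.news/robots.txt")
#   rp.read()
#   print(rp.can_fetch("GPTBot", "https://www.stl.news/"))      # expect False
#   print(rp.can_fetch("Twitterbot", "https://www.stl.news/"))  # expect True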