Add robots.txt for new sites, with instructions for blocking AI crawlers

This commit is contained in:
Kyle Drake 2024-12-14 00:20:51 -06:00
parent d63467c4ca
commit db35971217
4 changed files with 56 additions and 2 deletions

View file

@@ -494,6 +494,11 @@ class Site < Sequel::Model
     FileUtils.cp template_file_path('neocities.png'), tmpfile.path
     files << {filename: 'neocities.png', tempfile: tmpfile}
+    tmpfile = Tempfile.new 'robots.txt'
+    tmpfile.close
+    FileUtils.cp template_file_path('robots.txt'), tmpfile.path
+    files << {filename: 'robots.txt', tempfile: tmpfile}
     store_files files, new_install: true
   end

View file

@@ -44,7 +44,7 @@ describe 'signup' do
     _(File.exist?(index_file_path)).must_equal true
     site = Site[username: @site[:username]]
-    _(site.site_files.length).must_equal 4
+    _(site.site_files.length).must_equal 5
     _(site.site_changed).must_equal false
     _(site.site_updated_at).must_be_nil
     _(site.is_education).must_equal true

View file

@@ -55,7 +55,7 @@ describe 'signup' do
     _(File.exist?(index_file_path)).must_equal true
     site = Site[username: @site[:username]]
-    _(site.site_files.length).must_equal 4
+    _(site.site_files.length).must_equal 5
     _(site.site_changed).must_equal false
     _(site.site_updated_at).must_be_nil
     _(site.is_education).must_equal false

View file

@@ -0,0 +1,49 @@
# This file tells search engines and bots what they are allowed to see on your site.
# This is the default rule, which allows search engines to crawl your site (recommended).
User-agent: *
Allow: /
# If you do not want AI bots to crawl your site, remove the # from the following lines:
#User-agent: AI2Bot
#User-agent: Ai2Bot-Dolma
#User-agent: Amazonbot
#User-agent: anthropic-ai
#User-agent: Applebot
#User-agent: Applebot-Extended
#User-agent: Bytespider
#User-agent: CCBot
#User-agent: ChatGPT-User
#User-agent: Claude-Web
#User-agent: ClaudeBot
#User-agent: cohere-ai
#User-agent: Diffbot
#User-agent: DuckAssistBot
#User-agent: FacebookBot
#User-agent: FriendlyCrawler
#User-agent: Google-Extended
#User-agent: GoogleOther
#User-agent: GoogleOther-Image
#User-agent: GoogleOther-Video
#User-agent: GPTBot
#User-agent: iaskspider/2.0
#User-agent: ICC-Crawler
#User-agent: ImagesiftBot
#User-agent: img2dataset
#User-agent: ISSCyberRiskCrawler
#User-agent: Kangaroo Bot
#User-agent: Meta-ExternalAgent
#User-agent: Meta-ExternalFetcher
#User-agent: OAI-SearchBot
#User-agent: omgili
#User-agent: omgilibot
#User-agent: PanguBot
#User-agent: PerplexityBot
#User-agent: PetalBot
#User-agent: Scrapy
#User-agent: Sidetrade indexer bot
#User-agent: Timpibot
#User-agent: VelenPublicWebCrawler
#User-agent: Webzio-Extended
#User-agent: YouBot
#Disallow: /