diff --git a/models/site.rb b/models/site.rb index c3f2a516..e5845721 100644 --- a/models/site.rb +++ b/models/site.rb @@ -494,6 +494,11 @@ class Site < Sequel::Model FileUtils.cp template_file_path('neocities.png'), tmpfile.path files << {filename: 'neocities.png', tempfile: tmpfile} + tmpfile = Tempfile.new 'robots.txt' + tmpfile.close + FileUtils.cp template_file_path('robots.txt'), tmpfile.path + files << {filename: 'robots.txt', tempfile: tmpfile} + store_files files, new_install: true end diff --git a/tests/acceptance/education_tests.rb b/tests/acceptance/education_tests.rb index 8a9a3f05..d90e9ba6 100644 --- a/tests/acceptance/education_tests.rb +++ b/tests/acceptance/education_tests.rb @@ -44,7 +44,7 @@ describe 'signup' do _(File.exist?(index_file_path)).must_equal true site = Site[username: @site[:username]] - _(site.site_files.length).must_equal 4 + _(site.site_files.length).must_equal 5 _(site.site_changed).must_equal false _(site.site_updated_at).must_be_nil _(site.is_education).must_equal true diff --git a/tests/acceptance/signup_tests.rb b/tests/acceptance/signup_tests.rb index 8f9b8117..cff45d75 100644 --- a/tests/acceptance/signup_tests.rb +++ b/tests/acceptance/signup_tests.rb @@ -55,7 +55,7 @@ describe 'signup' do _(File.exist?(index_file_path)).must_equal true site = Site[username: @site[:username]] - _(site.site_files.length).must_equal 4 + _(site.site_files.length).must_equal 5 _(site.site_changed).must_equal false _(site.site_updated_at).must_be_nil _(site.is_education).must_equal false diff --git a/views/templates/robots.txt b/views/templates/robots.txt new file mode 100644 index 00000000..1bb81f23 --- /dev/null +++ b/views/templates/robots.txt @@ -0,0 +1,49 @@ +# This file tells search engines and bots what they are allowed to see on your site. + +# This is the default rule, which allows search engines to crawl your site (recommended). 
+User-agent: * +Allow: / + +# If you do not want AI bots to crawl your site, remove the # from the following lines: +#User-agent: AI2Bot +#User-agent: Ai2Bot-Dolma +#User-agent: Amazonbot +#User-agent: anthropic-ai +#User-agent: Applebot +#User-agent: Applebot-Extended +#User-agent: Bytespider +#User-agent: CCBot +#User-agent: ChatGPT-User +#User-agent: Claude-Web +#User-agent: ClaudeBot +#User-agent: cohere-ai +#User-agent: Diffbot +#User-agent: DuckAssistBot +#User-agent: FacebookBot +#User-agent: FriendlyCrawler +#User-agent: Google-Extended +#User-agent: GoogleOther +#User-agent: GoogleOther-Image +#User-agent: GoogleOther-Video +#User-agent: GPTBot +#User-agent: iaskspider/2.0 +#User-agent: ICC-Crawler +#User-agent: ImagesiftBot +#User-agent: img2dataset +#User-agent: ISSCyberRiskCrawler +#User-agent: Kangaroo Bot +#User-agent: Meta-ExternalAgent +#User-agent: Meta-ExternalFetcher +#User-agent: OAI-SearchBot +#User-agent: omgili +#User-agent: omgilibot +#User-agent: PanguBot +#User-agent: PerplexityBot +#User-agent: PetalBot +#User-agent: Scrapy +#User-agent: Sidetrade indexer bot +#User-agent: Timpibot +#User-agent: VelenPublicWebCrawler +#User-agent: Webzio-Extended +#User-agent: YouBot +#Disallow: /