Merge branch 'master' into patch-2

Kyle Drake 2025-04-22 21:59:57 -05:00 committed by GitHub
commit 739a797a2e
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
1286 changed files with 713001 additions and 5263 deletions

.github/workflows/ci.yml (new file, vendored, 46 changed lines)

@ -0,0 +1,46 @@
name: CI
on: [push, pull_request]
jobs:
  test:
    runs-on: ubuntu-22.04
    services:
      postgres:
        image: postgres
        env:
          POSTGRES_DB: ci_test
          POSTGRES_PASSWORD: citestpassword
        # Set health checks to wait until postgres has started
        options: >-
          --health-cmd pg_isready
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5
        ports:
          # Maps tcp port 5432 on service container to the host
          - 5432:5432
      redis:
        image: redis
        # Set health checks to wait until redis has started
        options: >-
          --health-cmd "redis-cli ping"
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5
        ports:
          # Maps port 6379 on service container to the host
          - 6379:6379
    steps:
      - uses: actions/checkout@v2
      - run: sudo apt-get update && sudo apt-get -y install libimlib2-dev chromium-browser
      - uses: ruby/setup-ruby@v1
        with:
          ruby-version: '3.3'
          bundler-cache: true
      - name: Install dependencies
        run: bundle install
      - name: Run tests with Coveralls
        env:
          COVERALLS_REPO_TOKEN: ${{ secrets.COVERALLS_REPO_TOKEN }}
        run: bundle exec rake
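For running the same suite outside CI, the tests would point at equivalent services. A minimal sketch of matching connection settings (the constant names and config shape are assumptions; only the host, port, database, and password values mirror the service containers above):

```ruby
require 'sequel'
require 'redis'

# Mirror the CI service containers: Postgres on localhost:5432, Redis on localhost:6379.
DB    = Sequel.connect('postgres://postgres:citestpassword@localhost:5432/ci_test')
REDIS = Redis.new(url: 'redis://localhost:6379')
```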

.gitignore (vendored, 1 changed line)

@ -20,6 +20,7 @@ files/sslsites.zip
.vagrant
public/banned_sites
public/deleted_sites
files/disposable_email_whitelist.conf
files/disposable_email_blacklist.conf
files/banned_email_blacklist.conf
files/letsencrypt.key

View file

@ -1,11 +0,0 @@
language: ruby
rvm:
- "2.6.0"
services:
- redis-server
- postgresql
before_script:
- psql -c 'create database travis_ci_test;' -U postgres
sudo: false
bundler_args: --jobs=1
before_install: gem install bundler

Gemfile (43 changed lines)

@ -1,34 +1,31 @@
source 'https://rubygems.org'
gem 'sinatra', '2.0.5'
gem 'sinatra'
gem 'redis'
gem 'redis-namespace'
gem 'sequel'
gem 'redis-namespace'
gem 'bcrypt'
gem 'sinatra-flash', require: 'sinatra/flash'
gem 'sinatra-xsendfile', require: 'sinatra/xsendfile'
gem 'puma', require: nil
gem 'rmagick', require: nil
gem 'sidekiq', '~> 4.2.10'
gem 'puma', '< 7', require: nil
gem 'sidekiq', '~> 7'
gem 'mail'
gem 'tilt'
gem 'erubis'
gem 'stripe', '~> 5.17.0' #, source: 'https://code.stripe.com/'
gem 'erubi'
gem 'stripe' #, source: 'https://code.stripe.com/'
gem 'terrapin'
gem 'zipruby'
gem 'sass', require: nil
gem 'dav4rack', git: 'https://github.com/neocities/dav4rack.git', ref: '3ecde122a0b8bcc1d85581dc85ef3a7120b6a8f0'
gem 'dav4rack', git: 'https://github.com/neocities/dav4rack.git', ref: '1bf1975c613d4f14d00f1e70ce7e0bb9e2e6cd9b'
gem 'filesize'
gem 'thread'
gem 'rack-cache'
gem 'rest-client', require: 'rest_client'
gem 'addressable', require: 'addressable/uri'
gem 'addressable', '>= 2.8.0', require: 'addressable/uri'
gem 'paypal-recurring', require: 'paypal/recurring'
gem 'geoip'
gem 'io-extra', require: 'io/extra'
gem 'rye'
gem 'base32'
gem 'coveralls', require: false
#gem 'rye'
gem 'coveralls_reborn', require: false
gem 'sanitize'
gem 'will_paginate'
gem 'simpleidn'
@ -53,10 +50,22 @@ gem 'activesupport'
gem 'facter', require: nil
gem 'maxmind-db'
gem 'json', '>= 2.3.0'
gem 'nokogiri'
gem 'webp-ffi'
gem 'rszr'
gem 'zip_tricks'
gem 'adequate_crypto_address'
gem 'twilio-ruby'
gem 'phonelib'
gem 'dnsbl-client'
gem 'minfraud'
gem 'image_optimizer' # apt install optipng jpegoptim pngquant
gem 'rubyzip', require: 'zip'
gem 'airbrake'
gem 'csv'
group :development, :test do
gem 'pry'
gem 'pry-byebug'
end
group :development do
@ -73,12 +82,12 @@ group :test do
gem 'mocha', require: nil
gem 'rake', '>= 12.3.3', require: nil
gem 'capybara', require: nil #, '2.10.1', require: nil
gem 'selenium-webdriver'
gem 'rack_session_access', require: nil
gem 'webmock', '3.5.1', require: nil
gem 'stripe-ruby-mock', '2.5.8', require: 'stripe_mock'
gem 'webmock', require: nil
gem 'stripe-ruby-mock', '~> 3.1.0.rc3', require: 'stripe_mock'
gem 'timecop'
gem 'mock_redis'
gem 'simplecov', require: nil
gem 'm'
gem 'apparition'
end

View file

@ -1,306 +1,423 @@
GIT
remote: https://github.com/neocities/dav4rack.git
revision: 3ecde122a0b8bcc1d85581dc85ef3a7120b6a8f0
ref: 3ecde122a0b8bcc1d85581dc85ef3a7120b6a8f0
revision: 1bf1975c613d4f14d00f1e70ce7e0bb9e2e6cd9b
ref: 1bf1975c613d4f14d00f1e70ce7e0bb9e2e6cd9b
specs:
dav4rack (1.1.0)
addressable (>= 2.5.0)
nokogiri (>= 1.6.0)
ox (>= 2.1.0)
rack (>= 1.6)
dav4rack (0.3.0)
nokogiri (>= 1.4.2)
rack (~> 3.0)
uuidtools (~> 2.1.1)
webrick
GEM
remote: https://rubygems.org/
specs:
acme-client (2.0.6)
faraday (>= 0.17, < 2.0.0)
activesupport (6.0.3.1)
concurrent-ruby (~> 1.0, >= 1.0.2)
i18n (>= 0.7, < 2)
minitest (~> 5.1)
tzinfo (~> 1.1)
zeitwerk (~> 2.2, >= 2.2.2)
addressable (2.7.0)
public_suffix (>= 2.0.2, < 5.0)
annoy (0.5.6)
highline (>= 1.5.0)
acme-client (2.0.19)
base64 (~> 0.2.0)
faraday (>= 1.0, < 3.0.0)
faraday-retry (>= 1.0, < 3.0.0)
activesupport (8.0.1)
base64
benchmark (>= 0.3)
bigdecimal
concurrent-ruby (~> 1.0, >= 1.3.1)
connection_pool (>= 2.2.5)
drb
i18n (>= 1.6, < 2)
logger (>= 1.4.2)
minitest (>= 5.1)
securerandom (>= 0.3)
tzinfo (~> 2.0, >= 2.0.5)
uri (>= 0.13.1)
addressable (2.8.7)
public_suffix (>= 2.0.2, < 7.0)
adequate_crypto_address (0.1.9)
base58 (~> 0.2)
keccak (~> 1.3)
airbrake (13.0.5)
airbrake-ruby (~> 6.0)
airbrake-ruby (6.2.2)
rbtree3 (~> 0.6)
ansi (1.5.0)
apparition (0.5.0)
capybara (~> 3.13, < 4)
websocket-driver (>= 0.6.5)
base32 (0.3.2)
bcrypt (3.1.13)
builder (3.2.3)
byebug (11.0.1)
capybara (3.32.2)
base58 (0.2.3)
base64 (0.2.0)
bcrypt (3.1.20)
benchmark (0.4.0)
bigdecimal (3.1.9)
builder (3.3.0)
capybara (3.40.0)
addressable
matrix
mini_mime (>= 0.1.3)
nokogiri (~> 1.8)
nokogiri (~> 1.11)
rack (>= 1.6.0)
rack-test (>= 0.6.3)
regexp_parser (~> 1.5)
regexp_parser (>= 1.5, < 3.0)
xpath (~> 3.2)
certified (1.0.0)
climate_control (0.2.0)
coderay (1.1.2)
concurrent-ruby (1.1.6)
connection_pool (2.2.2)
coveralls (0.8.23)
json (>= 1.8, < 3)
simplecov (~> 0.16.1)
term-ansicolor (~> 1.3)
thor (>= 0.19.4, < 2.0)
tins (~> 1.6)
crack (0.4.3)
safe_yaml (~> 1.0.0)
climate_control (1.2.0)
coderay (1.1.3)
concurrent-ruby (1.3.5)
connection_pool (2.5.0)
coveralls_reborn (0.28.0)
simplecov (~> 0.22.0)
term-ansicolor (~> 1.7)
thor (~> 1.2)
tins (~> 1.32)
crack (1.0.0)
bigdecimal
rexml
crass (1.0.6)
csv (3.3.2)
dante (0.2.0)
docile (1.3.2)
domain_name (0.5.20190701)
unf (>= 0.0.5, < 1.0.0)
drydock (0.6.9)
erubis (2.7.0)
exifr (1.3.6)
fabrication (2.20.2)
facter (2.5.6)
faker (2.4.0)
i18n (~> 1.6.0)
faraday (0.17.3)
multipart-post (>= 1.2, < 3)
faraday_middleware (0.14.0)
faraday (>= 0.7.4, < 1.0)
date (3.4.1)
dnsbl-client (1.1.1)
docile (1.4.1)
domain_name (0.6.20240107)
drb (2.2.1)
erubi (1.13.1)
exifr (1.4.1)
fabrication (2.31.0)
facter (4.10.0)
hocon (~> 1.3)
thor (>= 1.0.1, < 1.3)
faker (3.5.1)
i18n (>= 1.8.11, < 2)
faraday (1.10.4)
faraday-em_http (~> 1.0)
faraday-em_synchrony (~> 1.0)
faraday-excon (~> 1.1)
faraday-httpclient (~> 1.0)
faraday-multipart (~> 1.0)
faraday-net_http (~> 1.0)
faraday-net_http_persistent (~> 1.0)
faraday-patron (~> 1.0)
faraday-rack (~> 1.0)
faraday-retry (~> 1.0)
ruby2_keywords (>= 0.0.4)
faraday-em_http (1.0.0)
faraday-em_synchrony (1.0.0)
faraday-excon (1.1.0)
faraday-httpclient (1.0.1)
faraday-multipart (1.1.0)
multipart-post (~> 2.0)
faraday-net_http (1.0.2)
faraday-net_http_persistent (1.2.0)
faraday-patron (1.0.0)
faraday-rack (1.0.0)
faraday-retry (1.0.3)
faraday_middleware (1.2.1)
faraday (~> 1.0)
feedjira (2.1.4)
faraday (>= 0.9)
faraday_middleware (>= 0.9)
loofah (>= 2.0)
sax-machine (>= 1.0)
ffi (1.11.1)
ffi (1.17.1-aarch64-linux-gnu)
ffi (1.17.1-aarch64-linux-musl)
ffi (1.17.1-arm-linux-gnu)
ffi (1.17.1-arm-linux-musl)
ffi (1.17.1-arm64-darwin)
ffi (1.17.1-x86_64-darwin)
ffi (1.17.1-x86_64-linux-gnu)
ffi (1.17.1-x86_64-linux-musl)
ffi-compiler (1.3.2)
ffi (>= 1.15.5)
rake
filesize (0.2.0)
fspath (3.1.2)
gandi (3.3.28)
hashie
xmlrpc
geoip (1.6.4)
hashdiff (1.0.0)
hashie (3.6.0)
highline (2.0.2)
hashdiff (1.1.2)
hashie (5.0.0)
hiredis (0.6.3)
hoe (3.22.1)
hocon (1.4.0)
hoe (4.2.2)
rake (>= 0.8, < 15.0)
htmlentities (4.3.4)
http (4.1.1)
addressable (~> 2.3)
http (5.2.0)
addressable (~> 2.8)
base64 (~> 0.1)
http-cookie (~> 1.0)
http-form_data (~> 2.0)
http_parser.rb (~> 0.6.0)
http-form_data (~> 2.2)
llhttp-ffi (~> 0.5.0)
http-accept (1.7.0)
http-cookie (1.0.3)
http-cookie (1.0.8)
domain_name (~> 0.5)
http-form_data (2.1.1)
http_parser.rb (0.6.0)
i18n (1.6.0)
http-form_data (2.3.0)
i18n (1.14.7)
concurrent-ruby (~> 1.0)
image_optim (0.26.5)
image_optim (0.31.4)
exifr (~> 1.2, >= 1.2.2)
fspath (~> 3.0)
image_size (>= 1.5, < 3)
image_size (>= 1.5, < 4)
in_threads (~> 1.3)
progress (~> 3.0, >= 3.0.1)
image_optim_pack (0.6.0)
image_optim_pack (0.11.2)
fspath (>= 2.1, < 4)
image_optim (~> 0.19)
image_optim_pack (0.6.0-x86_64-linux)
image_optim_pack (0.11.2-x86_64-darwin)
fspath (>= 2.1, < 4)
image_optim (~> 0.19)
image_size (2.0.2)
in_threads (1.5.3)
io-extra (1.3.0)
image_optim_pack (0.11.2-x86_64-linux)
fspath (>= 2.1, < 4)
image_optim (~> 0.19)
image_optimizer (1.9.0)
image_size (3.4.0)
in_threads (1.6.0)
io-extra (1.4.0)
ipaddress (0.8.3)
json (2.3.1)
loofah (2.5.0)
json (2.9.1)
jwt (2.10.1)
base64
keccak (1.3.2)
llhttp-ffi (0.5.0)
ffi-compiler (~> 1.0)
rake (~> 13.0)
logger (1.6.5)
loofah (2.24.0)
crass (~> 1.0.2)
nokogiri (>= 1.5.9)
m (1.5.1)
nokogiri (>= 1.12.0)
m (1.6.2)
method_source (>= 0.6.7)
rake (>= 0.9.2.2)
magic (0.2.9)
ffi (>= 0.6.3)
mail (2.7.1)
mail (2.8.1)
mini_mime (>= 0.1.1)
maxmind-db (1.0.0)
metaclass (0.0.4)
method_source (0.9.2)
mime-types (3.3)
net-imap
net-pop
net-smtp
matrix (0.4.2)
maxmind-db (1.2.0)
maxmind-geoip2 (1.2.0)
connection_pool (~> 2.2)
http (>= 4.3, < 6.0)
maxmind-db (~> 1.2)
method_source (1.1.0)
mime-types (3.6.0)
logger
mime-types-data (~> 3.2015)
mime-types-data (3.2019.0904)
mini_mime (1.0.2)
mini_portile2 (2.4.0)
minitest (5.11.3)
minitest-reporters (1.3.8)
mime-types-data (3.2025.0107)
minfraud (2.6.0)
connection_pool (~> 2.2)
http (>= 4.3, < 6.0)
maxmind-geoip2 (~> 1.2)
simpleidn (~> 0.1, >= 0.1.1)
mini_mime (1.1.5)
minitest (5.25.4)
minitest-reporters (1.7.1)
ansi
builder
minitest (>= 5.0)
ruby-progressbar
mocha (1.9.0)
metaclass (~> 0.0.1)
mock_redis (0.21.0)
monetize (1.9.2)
mocha (2.7.1)
ruby2_keywords (>= 0.0.5)
mock_redis (0.49.0)
redis (~> 5)
monetize (1.13.0)
money (~> 6.12)
money (6.13.4)
money (6.19.0)
i18n (>= 0.6.4, <= 2)
msgpack (1.3.1)
multi_json (1.13.1)
multipart-post (2.1.1)
mustermann (1.0.3)
net-scp (2.0.0)
net-ssh (>= 2.6.5, < 6.0.0)
net-ssh (5.2.0)
msgpack (1.7.5)
multi_json (1.15.0)
multipart-post (2.4.1)
mustermann (3.0.3)
ruby2_keywords (~> 0.0.1)
net-imap (0.5.6)
date
net-protocol
net-pop (0.1.2)
net-protocol
net-protocol (0.2.2)
timeout
net-smtp (0.5.1)
net-protocol
netrc (0.11.0)
nio4r (2.5.2)
nokogiri (1.10.9)
mini_portile2 (~> 2.4.0)
nokogumbo (2.0.2)
nokogiri (~> 1.8, >= 1.8.4)
ox (2.11.0)
nio4r (2.7.4)
nokogiri (1.18.8-aarch64-linux-gnu)
racc (~> 1.4)
nokogiri (1.18.8-aarch64-linux-musl)
racc (~> 1.4)
nokogiri (1.18.8-arm-linux-gnu)
racc (~> 1.4)
nokogiri (1.18.8-arm-linux-musl)
racc (~> 1.4)
nokogiri (1.18.8-arm64-darwin)
racc (~> 1.4)
nokogiri (1.18.8-x86_64-darwin)
racc (~> 1.4)
nokogiri (1.18.8-x86_64-linux-gnu)
racc (~> 1.4)
nokogiri (1.18.8-x86_64-linux-musl)
racc (~> 1.4)
ostruct (0.6.1)
paypal-recurring (1.1.0)
pg (1.1.4)
progress (3.5.2)
pry (0.12.2)
coderay (~> 1.1.0)
method_source (~> 0.9.0)
pry-byebug (3.7.0)
byebug (~> 11.0)
pry (~> 0.10)
public_suffix (4.0.5)
puma (4.3.5)
pg (1.5.9)
phonelib (0.10.3)
progress (3.6.0)
pry (0.15.2)
coderay (~> 1.1)
method_source (~> 1.0)
public_suffix (6.0.1)
puma (6.6.0)
nio4r (~> 2.0)
rack (2.2.3)
rack-cache (1.9.0)
racc (1.8.1)
rack (3.1.12)
rack-cache (1.17.0)
rack (>= 0.4)
rack-protection (2.0.5)
rack
rack-test (1.1.0)
rack (>= 1.0, < 3)
rack-protection (4.1.1)
base64 (>= 0.1.0)
logger (>= 1.6.0)
rack (>= 3.0.0, < 4)
rack-session (2.1.0)
base64 (>= 0.1.0)
rack (>= 3.0.0)
rack-test (2.2.0)
rack (>= 1.3)
rack_session_access (0.2.0)
builder (>= 2.0.0)
rack (>= 1.0.0)
rake (13.0.1)
rb-fsevent (0.10.3)
rb-inotify (0.10.0)
rake (13.2.1)
rb-fsevent (0.11.2)
rb-inotify (0.11.1)
ffi (~> 1.0)
redis (3.3.5)
redis-namespace (1.6.0)
redis (>= 3.0.4)
regexp_parser (1.7.1)
rbtree3 (0.7.1)
redis (5.3.0)
redis-client (>= 0.22.0)
redis-client (0.23.2)
connection_pool
redis-namespace (1.11.0)
redis (>= 4)
regexp_parser (2.10.0)
rest-client (2.1.0)
http-accept (>= 1.7.0, < 2.0)
http-cookie (>= 1.0.2, < 2.0)
mime-types (>= 1.16, < 4.0)
netrc (~> 0.8)
rexml (3.4.1)
rinku (2.0.6)
rmagick (4.1.2)
ruby-progressbar (1.10.1)
rye (0.9.13)
annoy
docile (>= 1.0.1)
highline (>= 1.5.1)
net-scp (>= 1.0.2)
net-ssh (>= 2.0.13)
sysinfo (>= 0.8.1)
safe_yaml (1.0.5)
sanitize (5.2.1)
rszr (1.5.0)
ruby-progressbar (1.13.0)
ruby2_keywords (0.0.5)
rubyzip (2.4.1)
sanitize (7.0.0)
crass (~> 1.0.2)
nokogiri (>= 1.8.0)
nokogumbo (~> 2.0)
nokogiri (>= 1.16.8)
sass (3.7.4)
sass-listen (~> 4.0.0)
sass-listen (4.0.0)
rb-fsevent (~> 0.9, >= 0.9.4)
rb-inotify (~> 0.9, >= 0.9.7)
sax-machine (1.3.2)
sequel (5.24.0)
sequel_pg (1.12.2)
pg (>= 0.18.0)
securerandom (0.4.1)
selenium-webdriver (4.28.0)
base64 (~> 0.2)
logger (~> 1.4)
rexml (~> 3.2, >= 3.2.5)
rubyzip (>= 1.2.2, < 3.0)
websocket (~> 1.0)
sequel (5.89.0)
bigdecimal
sequel_pg (1.17.1)
pg (>= 0.18.0, != 1.2.0)
sequel (>= 4.38.0)
shotgun (0.9.2)
rack (>= 1.0)
sidekiq (4.2.10)
concurrent-ruby (~> 1.0)
connection_pool (~> 2.2, >= 2.2.0)
rack-protection (>= 1.5.0)
redis (~> 3.2, >= 3.2.1)
simplecov (0.16.1)
sidekiq (7.3.8)
base64
connection_pool (>= 2.3.0)
logger
rack (>= 2.2.4)
redis-client (>= 0.22.2)
simplecov (0.22.0)
docile (~> 1.1)
json (>= 1.8, < 3)
simplecov-html (~> 0.10.0)
simplecov-html (0.10.2)
simpleidn (0.1.1)
unf (~> 0.1.4)
sinatra (2.0.5)
mustermann (~> 1.0)
rack (~> 2.0)
rack-protection (= 2.0.5)
simplecov-html (~> 0.11)
simplecov_json_formatter (~> 0.1)
simplecov-html (0.13.1)
simplecov_json_formatter (0.1.4)
simpleidn (0.2.3)
sinatra (4.1.1)
logger (>= 1.6.0)
mustermann (~> 3.0)
rack (>= 3.0.0, < 4)
rack-protection (= 4.1.1)
rack-session (>= 2.0.0, < 3)
tilt (~> 2.0)
sinatra-flash (0.3.0)
sinatra (>= 1.0.0)
sinatra-xsendfile (0.4.2)
sinatra (>= 0.9.1)
storable (0.8.9)
stripe (5.17.0)
stripe-ruby-mock (2.5.8)
stripe (5.55.0)
stripe-ruby-mock (3.1.0)
dante (>= 0.2.0)
multi_json (~> 1.0)
stripe (>= 2.0.3)
sysinfo (0.8.1)
drydock
storable
term-ansicolor (1.7.1)
stripe (> 5, < 6)
sync (0.5.0)
term-ansicolor (1.11.2)
tins (~> 1.0)
terrapin (0.6.0)
climate_control (>= 0.0.3, < 1.0)
thor (0.20.3)
terrapin (1.0.1)
climate_control
thor (1.2.2)
thread (0.2.2)
thread_safe (0.3.6)
tilt (2.0.9)
timecop (0.9.1)
tins (1.21.1)
tzinfo (1.2.7)
thread_safe (~> 0.1)
unf (0.1.4)
unf_ext
unf_ext (0.0.7.6)
tilt (2.6.0)
timecop (0.9.10)
timeout (0.4.3)
tins (1.38.0)
bigdecimal
sync
twilio-ruby (7.4.3)
benchmark
faraday (>= 0.9, < 3.0)
jwt (>= 1.5, < 3.0)
nokogiri (>= 1.6, < 2.0)
ostruct
tzinfo (2.0.6)
concurrent-ruby (~> 1.0)
uri (1.0.3)
uuidtools (2.1.5)
webmock (3.5.1)
addressable (>= 2.3.6)
webmock (3.25.0)
addressable (>= 2.8.0)
crack (>= 0.3.2)
hashdiff
websocket-driver (0.7.2)
websocket-extensions (>= 0.1.0)
websocket-extensions (0.1.5)
will_paginate (3.1.8)
xmlrpc (0.3.0)
hashdiff (>= 0.4.0, < 2.0.0)
webp-ffi (0.4.0)
ffi (>= 1.9.0)
ffi-compiler (>= 0.1.2)
webrick (1.9.1)
websocket (1.2.11)
will_paginate (4.0.1)
xmlrpc (0.3.3)
webrick
xpath (3.2.0)
nokogiri (~> 1.8)
zeitwerk (2.3.0)
zipruby (0.3.6)
zip_tricks (5.6.0)
PLATFORMS
ruby
x86_64-linux
aarch64-linux-gnu
aarch64-linux-musl
arm-linux-gnu
arm-linux-musl
arm64-darwin
x86_64-darwin
x86_64-linux-gnu
x86_64-linux-musl
DEPENDENCIES
acme-client (~> 2.0.0)
activesupport
addressable
apparition
base32
addressable (>= 2.8.0)
adequate_crypto_address
airbrake
bcrypt
capybara
certified
coveralls
coveralls_reborn
csv
dav4rack!
erubis
dnsbl-client
erubi
fabrication
facter
faker
@ -314,6 +431,7 @@ DEPENDENCIES
http
image_optim
image_optim_pack
image_optimizer
io-extra
ipaddress
json (>= 2.3.0)
@ -321,17 +439,19 @@ DEPENDENCIES
magic
mail
maxmind-db
minfraud
minitest
minitest-reporters
mocha
mock_redis
monetize
msgpack
nokogiri
paypal-recurring
pg
phonelib
pry
pry-byebug
puma
puma (< 7)
rack-cache
rack-test
rack_session_access
@ -340,29 +460,32 @@ DEPENDENCIES
redis-namespace
rest-client
rinku
rmagick
rye
rszr
rubyzip
sanitize
sass
selenium-webdriver
sequel
sequel_pg
shotgun
sidekiq (~> 4.2.10)
sidekiq (~> 7)
simplecov
simpleidn
sinatra (= 2.0.5)
sinatra
sinatra-flash
sinatra-xsendfile
stripe (~> 5.17.0)
stripe-ruby-mock (= 2.5.8)
stripe
stripe-ruby-mock (~> 3.1.0.rc3)
terrapin
thread
tilt
timecop
webmock (= 3.5.1)
twilio-ruby
webmock
webp-ffi
will_paginate
xmlrpc
zipruby
zip_tricks
BUNDLED WITH
2.1.4
2.6.3

View file

@ -2,7 +2,7 @@
# Neocities.org
[![Build Status](https://travis-ci.org/neocities/neocities.png?branch=master)](https://travis-ci.org/neocities/neocities)
[![Build Status](https://github.com/neocities/neocities/actions/workflows/ci.yml/badge.svg)](https://github.com/neocities/neocities/actions?query=workflow%3ACI)
[![Coverage Status](https://coveralls.io/repos/neocities/neocities/badge.svg?branch=master&service=github)](https://coveralls.io/github/neocities/neocities?branch=master)
The web site for Neocities! It's open source. Want a feature on the site? Send a pull request!
@ -17,6 +17,8 @@ vagrant up --provision
![Vagrant takes a while, make a pizza while waiting](https://i.imgur.com/dKa8LUs.png)
Make a copy of `config.yml.template` in the root directory, and rename it to `config.yml`. Then:
```
vagrant ssh
bundle exec rackup -o 0.0.0.0
@ -28,7 +30,7 @@ Now you can access the running site from your browser: http://127.0.0.1:9292
If you'd like to fix a bug, or make an improvement, or add a new feature, it's easy! Just send us a Pull Request.
1. Fork it (<http://github.com/YOURUSERNAME/neocities/fork>)
1. Fork it (https://github.com/neocities/neocities/fork)
2. Create your feature branch (`git checkout -b my-new-feature`)
3. Commit your changes (`git commit -am 'Add some feature'`)
4. Push to the branch (`git push origin my-new-feature`)

Rakefile (432 changed lines)

@ -14,53 +14,48 @@ end
task :default => :test
=begin
desc "send domain update email"
task :send_domain_update_email => [:environment] do
Site.exclude(domain: nil).exclude(domain: '').all.each do |site|
msg = <<-HERE
MESSAGE GOES HERE TEST
HERE
site.send_email(
subject: 'SUBJECT GOES HERE',
body: msg
)
end
end
=end
desc "parse logs"
task :parse_logs => [:environment] do
desc "prune logs"
task :prune_logs => [:environment] do
Stat.prune!
StatLocation.prune!
StatReferrer.prune!
StatPath.prune!
end
desc "parse logs"
task :parse_logs => [:environment] do
Stat.parse_logfiles $config['logs_path']
end
desc 'Update disposable email blacklist'
task :update_disposable_email_blacklist => [:environment] do
uri = URI.parse('https://raw.githubusercontent.com/martenson/disposable-email-domains/master/disposable_email_blocklist.conf')
File.write(Site::DISPOSABLE_EMAIL_BLACKLIST_PATH, Net::HTTP.get(uri))
# Formerly: https://raw.githubusercontent.com/martenson/disposable-email-domains/master/disposable_email_blocklist.conf
uri = URI.parse('https://raw.githubusercontent.com/disposable/disposable-email-domains/master/domains.txt')
File.write(Site::DISPOSABLE_EMAIL_BLACKLIST_PATH, HTTP.get(uri))
end
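Once written, the blocklist is just one domain per line. A sketch of how a signup check could consult it (the `disposable_email?` helper is hypothetical; only the constant comes from the task above):

```ruby
require 'set'

# Hypothetical lookup against the downloaded blocklist (one domain per line).
def disposable_email?(email)
  domain = email.to_s.split('@').last.to_s.strip.downcase
  @disposable_domains ||= File.readlines(Site::DISPOSABLE_EMAIL_BLACKLIST_PATH, chomp: true).to_set
  @disposable_domains.include?(domain)
end
```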
desc 'Update banned IPs list'
task :update_blocked_ips => [:environment] do
IO.copy_stream(
open('https://www.stopforumspam.com/downloads/listed_ip_90.zip'),
'/tmp/listed_ip_90.zip'
)
filename = 'listed_ip_365_ipv46'
zip_path = "/tmp/#{filename}.zip"
Zip::Archive.open('/tmp/listed_ip_90.zip') do |ar|
ar.fopen('listed_ip_90.txt') do |f|
ips = f.read
File.open(zip_path, 'wb') do |file|
response = HTTP.get "https://www.stopforumspam.com/downloads/#{filename}.zip"
response.body.each do |chunk|
file.write chunk
end
end
Zip::File.open(zip_path) do |zip_file|
zip_file.each do |entry|
if entry.name == "#{filename}.txt"
ips = entry.get_input_stream.read
insert_hashes = []
ips.each_line { |ip| insert_hashes << { ip: ip.strip, created_at: Time.now } }
ips = nil
# Database transaction
DB.transaction do
DB[:blocked_ips].delete
DB[:blocked_ips].multi_insert insert_hashes
@ -69,171 +64,7 @@ task :update_blocked_ips => [:environment] do
end
end
desc 'parse tor exits'
task :parse_tor_exits => [:environment] do
exit_ips = Net::HTTP.get(URI.parse('https://check.torproject.org/exit-addresses'))
exit_ips.split("\n").collect {|line|
line.match(/ExitAddress (\d+\.\d+\.\d+\.\d+)/)&.captures&.first
}.compact
# ^^ Array of ip addresses of known exit nodes
end
desc 'Compile nginx mapfiles'
task :compile_nginx_mapfiles => [:environment] do
FileUtils.mkdir_p './files/maps'
File.open('./files/maps/domains.txt', 'w') do |file|
Site.exclude(domain: nil).exclude(domain: '').select(:username,:domain).all.each do |site|
file.write ".#{site.values[:domain]} #{site.username};\n"
end
end
File.open('./files/maps/supporters.txt', 'w') do |file|
Site.select(:username, :domain).exclude(plan_type: 'free').exclude(plan_type: nil).all.each do |parent_site|
sites = [parent_site] + parent_site.children
sites.each do |site|
file.write "#{site.username}.neocities.org 1;\n"
unless site.host.match(/\.neocities\.org$/)
file.write ".#{site.values[:domain]} 1;\n"
end
end
end
end
File.open('./files/maps/subdomain-to-domain.txt', 'w') do |file|
Site.select(:username, :domain).exclude(domain: nil).exclude(domain: '').all.each do |site|
file.write "#{site.username}.neocities.org #{site.values[:domain]};\n"
end
end
File.open('./files/maps/sandboxed.txt', 'w') do |file|
usernames = DB["select username from sites where created_at > ? and parent_site_id is null and (plan_type is null or plan_type='free') and is_banned != 't' and is_deleted != 't'", 2.days.ago].all.collect {|s| s[:username]}.each {|username| file.write "#{username} 1;\n"}
end
# Compile letsencrypt ssl keys
sites = DB[%{select username,ssl_key,ssl_cert,domain from sites where ssl_cert is not null and ssl_key is not null and (domain is not null or domain != '') and is_banned != 't' and is_deleted != 't'}].all
ssl_path = './files/maps/ssl'
FileUtils.mkdir_p ssl_path
sites.each do |site|
[site[:domain], "www.#{site[:domain]}"].each do |domain|
begin
key = OpenSSL::PKey::RSA.new site[:ssl_key]
crt = OpenSSL::X509::Certificate.new site[:ssl_cert]
rescue => e
puts "SSL ERROR: #{e.class} #{e.inspect}"
next
end
File.open(File.join(ssl_path, "#{domain}.key"), 'wb') {|f| f.write key.to_der}
File.open(File.join(ssl_path, "#{domain}.crt"), 'wb') {|f| f.write site[:ssl_cert]}
end
end
end
desc 'Produce SSL config package for proxy'
task :buildssl => [:environment] do
sites = Site.select(:id, :username, :domain, :ssl_key, :ssl_cert).
exclude(domain: nil).
exclude(ssl_key: nil).
exclude(ssl_cert: nil).
all
payload = []
begin
FileUtils.rm './files/sslsites.zip'
rescue Errno::ENOENT
end
Zip::Archive.open('./files/sslsites.zip', Zip::CREATE) do |ar|
ar.add_dir 'ssl'
sites.each do |site|
ar.add_buffer "ssl/#{site.username}.key", site.ssl_key
ar.add_buffer "ssl/#{site.username}.crt", site.ssl_cert
payload << {username: site.username, domain: site.domain}
end
ar.add_buffer 'sslsites.json', payload.to_json
end
end
desc 'Set existing stripe customers to internal supporter plan'
task :primenewstriperunonlyonce => [:environment] do
# Site.exclude(stripe_customer_id: nil).all.each do |site|
# site.plan_type = 'supporter'
# site.save_changes validate: false
# end
Site.exclude(stripe_customer_id: nil).where(plan_type: nil).where(plan_ended: false).all.each do |s|
customer = Stripe::Customer.retrieve(s.stripe_customer_id)
subscription = customer.subscriptions.first
next if subscription.nil?
puts "set subscription id to #{subscription.id}"
puts "set plan type to #{subscription.plan.id}"
s.stripe_subscription_id = subscription.id
s.plan_type = subscription.plan.id
s.save_changes(validate: false)
end
end
desc 'dedupe tags'
task :dedupetags => [:environment] do
Tag.all.each do |tag|
begin
tag.reload
rescue Sequel::Error => e
next if e.message =~ /Record not found/
end
matching_tags = Tag.exclude(id: tag.id).where(name: tag.name).all
matching_tags.each do |matching_tag|
DB[:sites_tags].where(tag_id: matching_tag.id).update(tag_id: tag.id)
matching_tag.delete
end
end
end
desc 'Clean tags'
task :cleantags => [:environment] do
Site.select(:id).all.each do |site|
if site.tags.length > 5
site.tags.slice(5, site.tags.length).each {|tag| site.remove_tag tag}
end
end
empty_tag = Tag.where(name: '').first
if empty_tag
DB[:sites_tags].where(tag_id: empty_tag.id).delete
end
Tag.all.each do |tag|
if tag.name.length > Tag::NAME_LENGTH_MAX || tag.name.match(/ /)
DB[:sites_tags].where(tag_id: tag.id).delete
DB[:tags].where(id: tag.id).delete
else
tag.update name: tag.name.downcase.strip
end
end
Tag.where(name: 'porn').first.update is_nsfw: true
end
desc 'update screenshots'
task :update_screenshots => [:environment] do
Site.select(:username).where(site_changed: true, is_banned: false, is_crashing: false).filter(~{updated_at: nil}).order(:updated_at.desc).all.each do |site|
ScreenshotWorker.perform_async site.username, 'index.html'
end
FileUtils.rm zip_path
end
desc 'rebuild_thumbnails'
@ -256,142 +87,11 @@ task :rebuild_thumbnails => [:environment] do
end
end
desc 'prime_space_used'
task :prime_space_used => [:environment] do
Site.select(:id,:username,:space_used).all.each do |s|
s.space_used = s.actual_space_used
s.save_changes validate: false
end
end
desc 'prime site_updated_at'
task :prime_site_updated_at => [:environment] do
Site.select(:id,:username,:site_updated_at, :updated_at).all.each do |s|
s.site_updated_at = s.updated_at
s.save_changes validate: false
end
end
desc 'prime_site_files'
task :prime_site_files => [:environment] do
Site.where(is_banned: false).where(is_deleted: false).select(:id, :username).all.each do |site|
Dir.glob(File.join(site.files_path, '**/*')).each do |file|
path = file.gsub(site.base_files_path, '').sub(/^\//, '')
site_file = site.site_files_dataset[path: path]
if site_file.nil?
mtime = File.mtime file
site_file_opts = {
path: path,
updated_at: mtime,
created_at: mtime
}
if File.directory? file
site_file_opts.merge! is_directory: true
else
site_file_opts.merge!(
size: File.size(file),
sha1_hash: Digest::SHA1.file(file).hexdigest
)
end
site.add_site_file site_file_opts
end
end
end
end
desc 'dedupe_follows'
task :dedupe_follows => [:environment] do
follows = Follow.all
deduped_follows = Follow.all.uniq {|f| "#{f.site_id}_#{f.actioning_site_id}"}
follows.each do |follow|
unless deduped_follows.include?(follow)
puts "deleting dedupe: #{follow.inspect}"
follow.delete
end
end
end
desc 'flush_empty_index_sites'
task :flush_empty_index_sites => [:environment] do
sites = Site.select(:id).all
counter = 0
sites.each do |site|
if site.empty_index?
counter += 1
site.site_changed = false
site.save_changes validate: false
end
end
puts "#{counter} sites set to not changed."
end
desc 'compute_scores'
task :compute_scores => [:environment] do
Site.compute_scores
end
=begin
desc 'Update screenshots'
task :update_screenshots => [:environment] do
Site.select(:username).filter(is_banned: false).filter(~{updated_at: nil}).order(:updated_at.desc).all.collect {|s|
ScreenshotWorker.perform_async s.username
}
end
=end
desc 'prime_classifier'
task :prime_classifier => [:environment] do
Site.select(:id, :username).where(is_banned: false, is_deleted: false).all.each do |site|
next if site.site_files_dataset.where(classifier: 'spam').count > 0
html_files = site.site_files_dataset.where(path: /\.html$/).all
html_files.each do |html_file|
print "training #{site.username}/#{html_file.path}..."
site.train html_file.path
print "done.\n"
end
end
end
desc 'train_spam'
task :train_spam => [:environment] do
paths = File.read('./spam.txt')
paths.split("\n").each do |path|
username, site_file_path = path.match(/^([a-zA-Z0-9_\-]+)\/(.+)$/i).captures
site = Site[username: username]
next if site.nil?
site_file = site.site_files_dataset.where(path: site_file_path).first
next if site_file.nil?
site.train site_file_path, :spam
site.ban!
puts "Deleted #{site_file_path}, banned #{site.username}"
end
end
desc 'regenerate_ssl_certs'
task :regenerate_ssl_certs => [:environment] do
sites = DB[%{select id from sites where (domain is not null or domain != '') and is_banned != 't' and is_deleted != 't'}].all
seconds = 2
sites.each do |site|
LetsEncryptWorker.perform_in seconds, site[:id]
seconds += 10
end
puts "#{sites.length.to_s} records are primed"
end
desc 'renew_ssl_certs'
task :renew_ssl_certs => [:environment] do
delay = 0
@ -408,24 +108,6 @@ task :purge_tmp_turds => [:environment] do
end
end
desc 'shard_migration'
task :shard_migration => [:environment] do
#Site.exclude(is_deleted: true).exclude(is_banned: true).select(:username).each do |site|
# FileUtils.mkdir_p File.join('public', 'testsites', site.username)
#end
#exit
Dir.chdir('./public/testsites')
Dir.glob('*').each do |dir|
sharding_dir = Site.sharding_dir(dir)
FileUtils.mkdir_p File.join('..', 'newtestsites', sharding_dir)
FileUtils.mv dir, File.join('..', 'newtestsites', sharding_dir)
end
sleep 1
FileUtils.rmdir './public/testsites'
sleep 1
FileUtils.mv './public/newtestsites', './public/testsites'
end
desc 'compute_follow_count_scores'
task :compute_follow_count_scores => [:environment] do
@ -439,41 +121,10 @@ task :compute_follow_count_scores => [:environment] do
end
end
desc 'prime_redis_proxy_ssl'
task :prime_redis_proxy_ssl => [:environment] do
site_ids = DB[%{
select id from sites where domain is not null and ssl_cert is not null and ssl_key is not null
and is_deleted != ? and is_banned != ?
}, true, true].all.collect {|site_id| site_id[:id]}
site_ids.each do |site_id|
Site[site_id].store_ssl_in_redis_proxy
end
end
desc 'dedupe_site_blocks'
task :dedupe_site_blocks => [:environment] do
duped_blocks = []
block_ids = Block.select(:id).all.collect {|b| b.id}
block_ids.each do |block_id|
next unless duped_blocks.select {|db| db.id == block_id}.empty?
block = Block[block_id]
if block
blocks = Block.exclude(id: block.id).where(site_id: block.site_id).where(actioning_site_id: block.actioning_site_id).all
duped_blocks << blocks
duped_blocks.flatten!
end
end
duped_blocks.each do |duped_block|
duped_block.destroy
end
end
desc 'ml_screenshots_list_dump'
task :ml_screenshots_list_dump => [:environment] do
['phishing', 'spam', 'ham', nil].each do |classifier|
File.open("./files/screenshot-urls-#{classifier.to_s}.txt", 'w') do |fp|
File.open("./files/screenshot-urls#{classifier.nil? ? '' : '-'+classifier.to_s}.txt", 'w') do |fp|
SiteFile.where(classifier: classifier).where(path: 'index.html').each do |site_file|
begin
fp.write "#{site_file.site.screenshot_url('index.html', Site::SCREENSHOT_RESOLUTIONS.first)}\n"
@ -488,11 +139,13 @@ desc 'generate_sitemap'
task :generate_sitemap => [:environment] do
sorted_sites = {}
# We pop off array, so highest scores go last.
sites = Site.
select(:id, :username, :updated_at, :profile_enabled).
where(site_changed: true).
exclude(updated_at: nil).
order(:follow_count, :updated_at).
exclude(is_deleted: true).
order(:score).
all
site_files = []
@ -500,13 +153,13 @@ task :generate_sitemap => [:environment] do
sites.each do |site|
site.site_files_dataset.exclude(path: 'not_found.html').where(path: /\.html?$/).all.each do |site_file|
if site.file_uri(site_file.path) == site.uri+'/'
if site.uri(site_file.path) == site.uri
priority = 0.5
else
priority = 0.4
end
site_files << [site.file_uri(site_file.path), site_file.updated_at.utc.iso8601, priority]
site_files << [site.uri(site_file.path), site_file.updated_at.utc.iso8601, priority]
end
end
@ -585,19 +238,22 @@ task :generate_sitemap => [:environment] do
end
gz.write %{</sitemapindex>}
end
end
desc 'ml_screenshots_list_dump'
task :ml_screenshots_list_dump => [:environment] do
['phishing', 'spam', 'ham', nil].each do |classifier|
File.open("./files/screenshot-urls-#{classifier.to_s}.txt", 'w') do |fp|
SiteFile.where(classifier: classifier).where(path: 'index.html').each do |site_file|
desc 'dedupe tags'
task :dedupetags => [:environment] do
Tag.all.each do |tag|
begin
fp.write "#{site_file.site.screenshot_url('index.html', Site::SCREENSHOT_RESOLUTIONS.first)}\n"
rescue NoMethodError
end
end
end
end
tag.reload
rescue Sequel::Error => e
next if e.message =~ /Record not found/
end
matching_tags = Tag.exclude(id: tag.id).where(name: tag.name).all
matching_tags.each do |matching_tag|
DB[:sites_tags].where(tag_id: matching_tag.id).update(tag_id: tag.id)
matching_tag.delete
end
end
end

Vagrantfile (vendored, 4 changed lines)

@ -1,12 +1,12 @@
VAGRANTFILE_API_VERSION = '2'
Vagrant.configure(VAGRANTFILE_API_VERSION) do |config|
config.vm.box = 'ubuntu/trusty64'
config.vm.box = 'ubuntu/jammy64'
config.vm.provision :shell, path: './vagrant/development.sh'
config.vm.network :forwarded_port, guest: 9292, host: 9292
config.vm.provider :virtualbox do |vb|
vb.customize ['modifyvm', :id, '--memory', '1536']
vb.customize ['modifyvm', :id, '--memory', '8192']
vb.name = 'neocities'
end
end

app.rb (48 changed lines)

@ -4,7 +4,7 @@ require './app_helpers.rb'
use Rack::Session::Cookie, key: 'neocities',
path: '/',
expire_after: 31556926, # one year in seconds
secret: $config['session_secret'],
secret: Base64.strict_decode64($config['session_secret']),
httponly: true,
same_site: :lax,
secure: ENV['RACK_ENV'] == 'production'
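With this change the `session_secret` value in config.yml must be base64-encoded, since it is now decoded at boot. A minimal sketch for generating a compatible value (the 64-byte length is an assumption, not part of this commit):

```ruby
require 'securerandom'
require 'base64'

# Prints a base64 string that Base64.strict_decode64 can decode back into raw bytes;
# paste the output into config.yml as session_secret.
puts Base64.strict_encode64(SecureRandom.random_bytes(64))
```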
@ -21,6 +21,13 @@ helpers do
def csrf_token_input_html
%{<input name="csrf_token" type="hidden" value="#{csrf_token}">}
end
def hcaptcha_input
%{
<script src="https://hcaptcha.com/1/api.js" async defer></script>
<div id="captcha_input" class="h-captcha" data-sitekey="#{$config['hcaptcha_site_key']}"></div>
}
end
end
set :protection, :frame_options => "DENY"
@ -51,6 +58,7 @@ GEOCITIES_NEIGHBORHOODS = %w{
televisioncity
tokyo
vienna
westhollywood
yosemite
}.freeze
@ -61,29 +69,53 @@ def redirect_to_internet_archive_for_geocities_sites
end
end
WHITELISTED_POST_PATHS = ['/create_validate_all', '/create_validate', '/create'].freeze
before do
if request.path.match /^\/api\//i
@api = true
content_type :json
elsif request.path.match /^\/webhooks\//
# Skips the CSRF/validation check for stripe web hooks
elsif email_not_validated? && !(request.path =~ /^\/site\/.+\/confirm_email|^\/settings\/change_email|^\/signout|^\/welcome|^\/supporter/)
elsif current_site && current_site.email_not_validated? && !(request.path =~ /^\/site\/.+\/confirm_email|^\/settings\/change_email|^\/welcome|^\/supporter|^\/signout/)
redirect "/site/#{current_site.username}/confirm_email"
elsif current_site && current_site.phone_verification_needed? && !(request.path =~ /^\/site\/.+\/confirm_email|^\/settings\/change_email|^\/site\/.+\/confirm_phone|^\/welcome|^\/supporter|^\/signout/)
redirect "/site/#{current_site.username}/confirm_phone"
elsif current_site && current_site.tutorial_required && !(request.path =~ /^\/site\/.+\/confirm_email|^\/settings\/change_email|^\/site\/.+\/confirm_phone|^\/welcome|^\/supporter|^\/tutorial\/.+/)
redirect '/tutorial/html/1'
else
content_type :html, 'charset' => 'utf-8'
redirect '/' if request.post? && !csrf_safe?
redirect '/' if request.post? && !WHITELISTED_POST_PATHS.include?(request.path_info) && !csrf_safe?
end
if params[:page]
params[:page] = params[:page].to_s
unless params[:page] =~ /^\d+$/ && params[:page].to_i > 0
params[:page] = '1'
end
end
if params[:tag]
begin
params.delete 'tag' if params[:tag].nil? || !params[:tag].is_a?(String) || params[:tag].strip.empty? || params[:tag].match?(Tag::INVALID_TAG_REGEX)
rescue Encoding::CompatibilityError
params.delete 'tag'
end
end
end
after do
if @api
request.session_options[:skip] = true
end
else
# Set issue timestamp on session cookie if it doesn't exist yet
session['i'] = Time.now.to_i if session && !session['i'] && session['id']
end
#after do
#response.headers['Content-Security-Policy'] = %{block-all-mixed-content; default-src 'self'; connect-src 'self' https://api.stripe.com; frame-src https://www.google.com/recaptcha/ https://js.stripe.com; script-src 'self' 'unsafe-inline' https://www.google.com/recaptcha/ https://www.gstatic.com/recaptcha/ https://js.stripe.com; style-src 'self' 'unsafe-inline'; img-src 'self' data: }
#end
unless self.class.development?
response.headers['Content-Security-Policy'] = %{default-src 'self' data: blob: 'unsafe-inline'; script-src 'self' blob: 'unsafe-inline' 'unsafe-eval' https://hcaptcha.com https://*.hcaptcha.com https://js.stripe.com; style-src 'self' 'unsafe-inline' https://hcaptcha.com https://*.hcaptcha.com; connect-src 'self' https://hcaptcha.com https://*.hcaptcha.com https://api.stripe.com; frame-src 'self' https://hcaptcha.com https://*.hcaptcha.com https://js.stripe.com}
end
end
not_found do
api_not_found if @api
@ -93,6 +125,7 @@ not_found do
end
error do
=begin
EmailWorker.perform_async({
from: 'web@neocities.org',
to: 'errors@neocities.org',
@ -100,6 +133,7 @@ error do
body: erb(:'templates/email/error', layout: false),
no_footer: true
})
=end
if @api
api_error 500, 'server_error', 'there has been an unknown server error, please try again later'

View file

@ -1,73 +1,54 @@
get '/activity' do
#expires 7200, :public, :must_revalidate if self.class.production? # 2 hours
params[:activity] = 'global' # FIXME this is a bad hack
global_dataset = Event.global_dataset
@page = params[:page] || 1
if params[:event_id]
global_dataset = global_dataset.where Sequel.qualify(:events, :id) => params[:event_id]
if params[:tag]
query1 = Event
.join(:sites, id: :site_id)
.join(:sites_tags, site_id: :id)
.join(:tags, id: :tag_id)
.where(tags__name: params[:tag])
.where(events__is_deleted: false, sites__is_deleted: false)
.where{sites__score > Event::ACTIVITY_TAG_SCORE_LIMIT}
.where(sites__is_nsfw: false)
.where(follow_id: nil)
.select_all(:events)
query2 = Event
.join(:sites, id: :actioning_site_id)
.join(:sites_tags, site_id: :id)
.join(:tags, id: :tag_id)
.where(tags__name: params[:tag])
.where(events__is_deleted: false, sites__is_deleted: false)
.where{sites__score > Event::ACTIVITY_TAG_SCORE_LIMIT}
.where(sites__is_nsfw: false)
.where(follow_id: nil)
.select_all(:events)
if current_site
blocking_site_ids = current_site.blocking_site_ids
query1 = query1.where(Sequel.|({events__site_id: nil}, ~{events__site_id: blocking_site_ids})).where(Sequel.|({events__actioning_site_id: nil}, ~{events__actioning_site_id: blocking_site_ids}))
query2 = query2.where(Sequel.|({events__site_id: nil}, ~{events__site_id: blocking_site_ids})).where(Sequel.|({events__actioning_site_id: nil}, ~{events__actioning_site_id: blocking_site_ids}))
end
=begin
initial_events = global_dataset.all
events = []
initial_events.each do |event|
site = Site.select(:id).where(id: event.site_id).first
actioning_site = Site.select(:id).where(id: event.actioning_site_id).first
disclude_event = false
disclude_event = true if site.is_a_jerk?
if event.tip_id
disclude_event = true if actioning_site && actioning_site.is_a_jerk?
ds = query1.union(query2, all: false).order(Sequel.desc(:created_at))
else
disclude_event = true if actioning_site && (actioning_site.is_a_jerk? || actioning_site.follows_dataset.count < 2)
ds = Event.news_feed_default_dataset.exclude(sites__is_nsfw: true)
if current_site
blocking_site_ids = current_site.blocking_site_ids
ds = ds.where(Sequel.|({events__site_id: nil}, ~{events__site_id: blocking_site_ids})).where(Sequel.|({events__actioning_site_id: nil}, ~{events__actioning_site_id: blocking_site_ids}))
end
events.push(event) unless disclude_event
ds = ds.where(
Sequel.expr(Sequel[:sites][:score] > Event::GLOBAL_SCORE_LIMIT) |
Sequel.expr(Sequel[:actioning_sites][:score] > Event::GLOBAL_SCORE_LIMIT)
)
end
initial_site_change_events = Event.global_site_changes_dataset.limit(100).all
site_change_events = []
initial_site_change_events.each do |event|
site = Site.select(:id).where(id: event.site_id).first
site_change_events.push(event) if !site.is_a_jerk? && site.follows_dataset.count > 1
end
@events = []
events.each do |event|
unless site_change_events.empty?
until site_change_events.first.created_at < event.created_at
@events << site_change_events.shift
break if site_change_events.empty?
end
end
@events << event
end
=end
if SimpleCache.expired?(:activity_event_ids)
initial_events = Event.global_site_changes_dataset.limit(500).all
@events = []
initial_events.each do |event|
event_site = event.site
next if @events.select {|e| e.site_id == event.site_id}.count >= 1
next if event_site.is_a_jerk?
next unless event_site.follows_dataset.count > 1
@events.push event
end
SimpleCache.store :activity_event_ids, @events.collect {|e| e.id}, 60.minutes
else
@events = Event.where(id: SimpleCache.get(:activity_event_ids)).order(:created_at.desc).all
end
@pagination_dataset = ds.paginate @page.to_i, Event::GLOBAL_PAGINATION_LENGTH
@events = @pagination_dataset.all
erb :'activity'
end

View file

@ -250,7 +250,8 @@ post '/admin/banhammer' do
StopForumSpamWorker.perform_async(
username: site.username,
email: site.email,
ip: site.ip
ip: site.ip,
classifier: params[:classifier]
)
end
end
@ -300,5 +301,9 @@ get '/admin/masquerade/:username' do
end
def require_admin
redirect '/' unless signed_in? && current_site.is_admin
redirect '/' unless is_admin?
end
def is_admin?
signed_in? && current_site.is_admin
end

View file

@ -31,6 +31,7 @@ get '/api/list' do
new_file[:path] = file[:path]
new_file[:is_directory] = file[:is_directory]
new_file[:size] = file[:size] unless file[:is_directory]
new_file[:created_at] = file[:created_at].rfc2822
new_file[:updated_at] = file[:updated_at].rfc2822
new_file[:sha1_hash] = file[:sha1_hash] unless file[:is_directory]
files << new_file
@ -41,14 +42,55 @@ get '/api/list' do
api_success files: files
end
def extract_files(params, files = [])
# Check if the entire input is directly an array of files
if params.is_a?(Array)
params.each do |item|
# Call extract_files on each item if it's an Array or Hash to handle nested structures
if item.is_a?(Array) || item.is_a?(Hash)
extract_files(item, files)
end
end
elsif params.is_a?(Hash)
params.each do |key, value|
# If the value is a Hash and contains a :tempfile key, it's considered an uploaded file.
if value.is_a?(Hash) && value.has_key?(:tempfile) && !value[:tempfile].nil?
files << {filename: value[:name], tempfile: value[:tempfile]}
elsif value.is_a?(Array)
value.each do |val|
if val.is_a?(Hash) && val.has_key?(:tempfile) && !val[:tempfile].nil?
# Directly add the file info if it's an uploaded file within an array
files << {filename: val[:name], tempfile: val[:tempfile]}
elsif val.is_a?(Hash) || val.is_a?(Array)
# Recursively search for more files if the element is a Hash or Array
extract_files(val, files)
end
end
elsif value.is_a?(Hash)
# Recursively search for more files if the value is a Hash
extract_files(value, files)
end
end
end
files
end
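A small usage sketch of the recursive extraction above (the params hash shown is illustrative, not taken from the commit): it flattens whatever nesting Rack produced for a multipart upload into a flat list of filename/tempfile pairs.

```ruby
require 'tempfile'

# Hypothetical params shape after Rack parses a nested multipart form:
params = {
  'files' => [
    { name: 'index.html', tempfile: Tempfile.new('index') },
    { 'nested' => { name: 'img/cat.png', tempfile: Tempfile.new('cat') } }
  ]
}

extract_files(params)
# => [{filename: "index.html", tempfile: #<Tempfile ...>},
#     {filename: "img/cat.png", tempfile: #<Tempfile ...>}]
```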
post '/api/upload' do
require_api_credentials
files = extract_files params
files = []
params.each do |k,v|
next unless v.is_a?(Hash) && v[:tempfile]
path = k.to_s
files << {filename: k || v[:filename], tempfile: v[:tempfile]}
if !params[:username].blank?
site = Site[username: params[:username]]
if site.nil? || site.is_deleted
api_error 400, 'site_not_found', "could not find site"
end
if site.owned_by?(current_site)
@_site = site
else
api_error 400, 'site_not_allowed', "not allowed to change this site with your current logged in site"
end
end
api_error 400, 'missing_files', 'you must provide files to upload' if files.empty?
@ -60,16 +102,29 @@ post '/api/upload' do
end
if current_site.too_many_files?(files.length)
api_error 400, 'too_many_files', "cannot exceed the maximum site files limit (#{current_site.plan_feature(:maximum_site_files)}), #{current_site.supporter? ? 'please contact support' : 'please upgrade to a supporter account'}"
api_error 400, 'too_many_files', "cannot exceed the maximum site files limit (#{current_site.plan_feature(:maximum_site_files)})"
end
files.each do |file|
file[:filename] = Rack::Utils.unescape file[:filename]
if !current_site.okay_to_upload?(file)
api_error 400, 'invalid_file_type', "#{file[:filename]} is not a valid file type (or contains not allowed content) for this site, files have not been uploaded"
api_error 400, 'invalid_file_type', "#{file[:filename]} is not an allowed file type for free sites, supporter required"
end
if File.directory? file[:filename]
api_error 400, 'directory_exists', 'this name is being used by a directory, cannot continue'
api_error 400, 'directory_exists', "#{file[:filename]} being used by a directory"
end
if current_site.file_size_too_large? file[:tempfile].size
api_error 400, 'file_too_large', "#{file[:filename]} is too large"
end
if SiteFile.path_too_long? file[:filename]
api_error 400, 'file_path_too_long', "#{file[:filename]} path is too long"
end
if SiteFile.name_too_long? file[:filename]
api_error 400, 'file_name_too_long', "#{file[:filename]} filename is too long (exceeds #{SiteFile::FILE_NAME_CHARACTER_LIMIT} characters)"
end
end
@ -167,8 +222,7 @@ def api_info_for(site)
created_at: site.created_at.rfc2822,
last_updated: site.site_updated_at ? site.site_updated_at.rfc2822 : nil,
domain: site.domain,
tags: site.tags.collect {|t| t.name},
latest_ipfs_hash: site.latest_archive ? site.latest_archive.ipfs_hash : nil
tags: site.tags.collect {|t| t.name}
}
}
end
@ -184,9 +238,10 @@ post '/api/:name' do
end
def require_api_credentials
return true if current_site && csrf_safe?
if !request.env['HTTP_AUTHORIZATION'].nil?
init_api_credentials
api_error(403, 'email_not_validated', 'you need to validate your email address before using the API') if email_not_validated?
else
api_error_invalid_auth
end
@ -214,7 +269,7 @@ def init_api_credentials
api_error_invalid_auth
end
if site.nil? || site.is_banned || site.is_deleted
if site.nil? || site.is_banned || site.is_deleted || !(site.required_validations_met?)
api_error_invalid_auth
end

View file

@ -1,9 +1,11 @@
get '/browse/?' do
@surfmode = false
@page = params[:page].to_i
@page = 1 if @page == 0
@page = params[:page]
@page = 1 if @page.not_an_integer?
params.delete 'tag' if params[:tag].nil? || params[:tag].strip.empty?
if params[:tag]
params[:tag] = params[:tag].gsub(Tag::INVALID_TAG_REGEX, '').gsub(/\s+/, '').slice(0, Tag::NAME_LENGTH_MAX)
@title = "Sites tagged #{params[:tag]}"
end
if is_education?
ds = education_sites_dataset
@ -11,7 +13,7 @@ get '/browse/?' do
ds = browse_sites_dataset
end
ds = ds.paginate @page, Site::BROWSE_PAGINATION_LENGTH
ds = ds.paginate @page.to_i, Site::BROWSE_PAGINATION_LENGTH
@pagination_dataset = ds
@sites = ds.all
@ -23,10 +25,6 @@ get '/browse/?' do
@site_tags[site_id] = tags.select {|t| t[:site_id] == site_id}.collect {|t| t[:name]}
end
if params[:tag]
@title = "Sites tagged #{params[:tag]}"
end
erb :browse
end
@ -55,41 +53,50 @@ def browse_sites_dataset
end
end
if current_site && current_site.is_admin && params[:sites]
ds = ds.where sites__username: params[:sites].split(',')
return ds
end
params[:sort_by] ||= 'special_sauce'
case params[:sort_by]
when 'special_sauce'
ds = ds.exclude score: nil
ds = ds.order :score.desc
when 'followers'
ds = ds.order :follow_count.desc, :updated_at.desc
when 'supporters'
ds = ds.where id: Site.supporter_ids
ds = ds.order :follow_count.desc, :views.desc, :site_updated_at.desc
ds = ds.where{score > 1} unless params[:tag]
ds = ds.order :score.desc, :follow_count.desc, :views.desc, :site_updated_at.desc
when 'random'
ds = ds.where{score > 3} unless params[:tag]
ds = ds.order(Sequel.lit('RANDOM()'))
when 'most_followed'
ds = ds.where{views > Site::BROWSE_MINIMUM_FOLLOWER_VIEWS}
ds = ds.where{follow_count > Site::BROWSE_FOLLOWER_MINIMUM_FOLLOWS}
ds = ds.where{updated_at > Site::BROWSE_FOLLOWER_UPDATED_AT_CUTOFF.ago} unless params[:tag]
ds = ds.order :follow_count.desc, :score.desc, :updated_at.desc
when 'last_updated'
ds = ds.where{score > 3} unless params[:tag]
ds = ds.exclude site_updated_at: nil
ds = ds.order :site_updated_at.desc
when 'newest'
ds = ds.where{views > Site::BROWSE_MINIMUM_VIEWS} unless is_admin?
ds = ds.exclude site_updated_at: nil
ds = ds.order :created_at.desc, :views.desc
when 'oldest'
ds = ds.where{score > 0.4} unless params[:tag]
ds = ds.exclude site_updated_at: nil
ds = ds.order(:created_at, :views.desc)
when 'hits'
ds = ds.where{score > 1}
ds = ds.order(:hits.desc, :site_updated_at.desc)
when 'views'
ds = ds.where{score > 3}
ds = ds.order(:views.desc, :site_updated_at.desc)
when 'featured'
ds = ds.exclude featured_at: nil
ds = ds.order :featured_at.desc
when 'hits'
ds = ds.where{views > 100}
ds = ds.order(:hits.desc, :site_updated_at.desc)
when 'views'
ds = ds.where{views > 100}
ds = ds.order(:views.desc, :site_updated_at.desc)
when 'newest'
ds = ds.order(:created_at.desc, :views.desc)
when 'oldest'
ds = ds.where{views > 100}
ds = ds.order(:created_at, :views.desc)
when 'random'
ds = ds.where{views > 100}
ds = ds.where 'random() < 0.01'
when 'last_updated'
ds = ds.where{views > 100}
params[:sort_by] = 'last_updated'
ds = ds.exclude(site_updated_at: nil)
ds = ds.order(:site_updated_at.desc, :views.desc)
when 'tipping_enabled'
ds = ds.where tipping_enabled: true
ds = ds.where("(tipping_paypal is not null and tipping_paypal != '') or (tipping_bitcoin is not null and tipping_bitcoin != '')")
ds = ds.where{views > 10_000}
ds = ds.where{score > 1} unless params[:tag]
ds = ds.group :sites__id
ds = ds.order :follow_count.desc, :views.desc, :updated_at.desc
when 'blocks'
@ -98,10 +105,6 @@ def browse_sites_dataset
ds = ds.inner_join :blocks, :site_id => :id
ds = ds.group :sites__id
ds = ds.order :total.desc
else
params[:sort_by] = 'followers'
ds = ds.where{views > 10_000}
ds = ds.order :follow_count.desc, :views.desc, :updated_at.desc
end
ds = ds.where ['sites.is_nsfw = ?', (params[:is_nsfw] == 'true' ? true : false)]
@ -116,3 +119,71 @@ def browse_sites_dataset
ds
end
def daily_search_max?
query_count = $redis_cache.get('search_query_count').to_i
query_count >= $config['google_custom_search_query_limit']
end
get '/browse/search' do
@title = 'Site Search'
@daily_search_max_reached = daily_search_max?
if @daily_search_max_reached
params[:q] = nil
end
if !params[:q].blank?
created = $redis_cache.set('search_query_count', 1, nx: true, ex: 86400)
$redis_cache.incr('search_query_count') unless created
@start = params[:start].to_i
@start = 0 if @start < 0
@resp = JSON.parse HTTP.get('https://www.googleapis.com/customsearch/v1', params: {
key: $config['google_custom_search_key'],
cx: $config['google_custom_search_cx'],
safe: 'active',
start: @start,
q: Rack::Utils.escape(params[:q]) + ' -filetype:pdf -filetype:txt site:*.neocities.org'
})
@items = []
if @total_results != 0 && @resp['error'].nil? && @resp['searchInformation']['totalResults'] != "0"
@total_results = @resp['searchInformation']['totalResults'].to_i
@resp['items'].each do |item|
link = Addressable::URI.parse(item['link'])
unencoded_path = Rack::Utils.unescape(Rack::Utils.unescape(link.path)) # Yes, it needs to be decoded twice
item['unencoded_link'] = unencoded_path == '/' ? link.host : link.host+unencoded_path
item['link'] = link
next if link.host == 'neocities.org'
username = link.host.split('.').first
site = Site[username: username]
next if site.nil? || site.is_deleted || site.is_nsfw
screenshot_path = unencoded_path
screenshot_path << 'index' if screenshot_path[-1] == '/'
['.html', '.htm'].each do |ext|
if site.screenshot_exists?(screenshot_path + ext, '540x405')
screenshot_path += ext
break
end
end
item['screenshot_url'] = site.screenshot_url(screenshot_path, '540x405')
@items << item
end
end
else
@items = nil
@total_results = 0
end
erb :'search'
end

View file

@ -2,6 +2,7 @@ post '/comment/:comment_id/toggle_like' do |comment_id|
require_login
content_type :json
comment = Comment[id: comment_id]
return 403 if comment.event.site.is_blocking?(current_site) || current_site.is_blocking?(comment.event.site)
liked_response = comment.toggle_site_like(current_site) ? 'liked' : 'unliked'
{result: liked_response, comment_like_count: comment.comment_likes_dataset.count, liking_site_names: comment.liking_site_usernames}.to_json
end

View file

@ -9,7 +9,11 @@ post '/contact' do
@errors << 'Please fill out all fields'
end
if !recaptcha_valid?
if params[:faq_check] == 'no'
@errors << 'Please check the Frequently Asked Questions before sending a contact message'
end
unless hcaptcha_valid?
@errors << 'Captcha was not filled out (or was filled out incorrectly)'
end
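For reference, an `hcaptcha_valid?` helper along these lines is what such checks typically call. This is a sketch against hCaptcha's public siteverify endpoint, with the config key name assumed; it is not necessarily the helper this commit adds elsewhere:

```ruby
def hcaptcha_valid?
  return true if self.class.test?
  resp = JSON.parse HTTP.post('https://hcaptcha.com/siteverify', form: {
    secret: $config['hcaptcha_secret_key'],   # assumed config key name
    response: params[:'h-captcha-response']
  }).to_s
  resp['success'] == true
end
```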

View file

@ -1,7 +1,12 @@
CREATE_MATCH_REGEX = /^username$|^password$|^email$|^new_tags_string$|^is_education$/
def education_whitelist_required?
return true if params[:is_education] == 'true' && $config['education_tag_whitelist']
false
end
def education_whitelisted?
return true if params[:is_education] == 'true' && $config['education_tag_whitelist'] && !$config['education_tag_whitelist'].select {|t| params[:new_tags_string].match(t)}.empty?
return true if education_whitelist_required? && !$config['education_tag_whitelist'].select {|t| params[:new_tags_string].match(t)}.empty?
false
end
@ -9,14 +14,23 @@ post '/create_validate_all' do
content_type :json
fields = params.select {|p| p.match CREATE_MATCH_REGEX}
begin
site = Site.new fields
if site.valid?
return [].to_json if education_whitelisted? || params[:'g-recaptcha-response'] || self.class.test?
return [['captcha', 'Please complete the captcha.']].to_json
rescue ArgumentError => e
if e.message == 'input string invalid'
return {error: 'invalid input'}.to_json
else
raise e
end
end
site.errors.collect {|e| [e.first, e.last.first]}.to_json
if site.valid?
return [].to_json if education_whitelisted?
end
resp = site.errors.collect {|e| [e.first, e.last.first]}
resp << ['captcha', 'Please complete the captcha.'] if params[:'h-captcha-response'].empty? && !self.class.test?
resp.to_json
end
post '/create_validate' do
@ -26,7 +40,16 @@ post '/create_validate' do
return {error: 'not a valid field'}.to_json
end
begin
site = Site.new(params[:field] => params[:value])
rescue ArgumentError => e
if e.message == 'input string invalid'
return {error: 'invalid input'}.to_json
else
raise e
end
end
site.is_education = params[:is_education]
site.valid?
@ -41,15 +64,6 @@ end
post '/create' do
content_type :json
if banned?(true)
signout
session[:banned] = true if !session[:banned]
flash[:error] = 'There was an error, please <a href="/contact">contact support</a> to log in.'
redirect '/'
end
dashboard_if_signed_in
@site = Site.new(
@ -62,10 +76,15 @@ post '/create' do
ga_adgroupid: session[:ga_adgroupid]
)
if education_whitelist_required?
if education_whitelisted?
@site.email_confirmed = true
else
if !recaptcha_valid?
flash[:error] = 'The class tag is invalid.'
return {result: 'error'}.to_json
end
else
if !hcaptcha_valid?
flash[:error] = 'The captcha was not valid, please try again.'
return {result: 'error'}.to_json
end
@ -80,6 +99,15 @@ post '/create' do
return {result: 'error'}.to_json
end
if defined?(BlackBox.create_disabled?) && BlackBox.create_disabled?(@site, request)
flash[:error] = 'Site creation is not currently available from your location, please try again later.'
return {result: 'error'}.to_json
end
if defined?(BlackBox.tutorial_required?) && BlackBox.tutorial_required?(@site, request)
@site.tutorial_required = true
end
if !@site.valid?
flash[:error] = @site.errors.first.last.first
return {result: 'error'}.to_json
@ -87,15 +115,38 @@ post '/create' do
end
@site.email_confirmed = true if self.class.development?
@site.phone_verified = true if self.class.development?
begin
@site.phone_verification_required = true if self.class.production? && BlackBox.phone_verification_required?(@site)
rescue => e
EmailWorker.perform_async({
from: 'web@neocities.org',
to: 'errors@neocities.org',
subject: "[Neocities Error] Phone verification exception",
body: "#{e.inspect}\n#{e.backtrace}",
no_footer: true
})
end
begin
@site.save
rescue Sequel::UniqueConstraintViolation => e
if e.message =~ /username.+already exists/
flash[:error] = 'Username already exists.'
return {result: 'error'}.to_json
end
raise e
end
unless education_whitelisted?
send_confirmation_email @site
@site.send_email(
subject: "[Neocities] Welcome to Neocities!",
body: Tilt.new('./views/templates/email_welcome.erb', pretty: true).render(self)
body: Tilt.new('./views/templates/email/welcome.erb', pretty: true).render(self)
)
send_confirmation_email @site
end
session[:id] = @site.id

View file

@ -8,7 +8,7 @@ get '/dashboard' do
current_site.save_changes validate: false
end
erb :'dashboard'
erb :'dashboard/index'
end
def dashboard_init
@ -30,3 +30,11 @@ def dashboard_init
@dir = params[:dir]
@file_list = current_site.file_list @dir
end
get '/dashboard/files' do
require_login
dashboard_init
dont_browser_cache
erb :'dashboard/files', layout: false
end

View file

@ -16,7 +16,7 @@ post '/dmca/contact' do
@errors << 'Please fill out all fields'
end
if !recaptcha_valid?
if !hcaptcha_valid?
@errors << 'Captcha was not filled out (or was filled out incorrectly)'
end

View file

@ -2,6 +2,8 @@ post '/event/:event_id/toggle_like' do |event_id|
require_login
content_type :json
event = Event[id: event_id]
return 403 if event.site && event.site.is_blocking?(current_site)
return 403 if event.actioning_site && event.actioning_site.is_blocking?(current_site)
liked_response = event.toggle_site_like(current_site) ? 'liked' : 'unliked'
{result: liked_response, event_like_count: event.likes_dataset.count, liking_site_names: event.liking_site_usernames}.to_json
end
@ -11,6 +13,9 @@ post '/event/:event_id/comment' do |event_id|
content_type :json
event = Event[id: event_id]
return 403 if event.site && event.site.is_blocking?(current_site)
return 403 if event.actioning_site && event.actioning_site.is_blocking?(current_site)
site = event.site
if(site.is_blocking?(current_site) ||

View file

@ -1,27 +1,21 @@
get '/?' do
if params[:_ga_adgroupid]
session[:ga_adgroupid] = params[:_ga_adgroupid]
end
if current_site
require_login
redirect '/dashboard' if current_site.is_education
@page = params[:page].to_i
@page = 1 if @page == 0
@page = params[:page]
@page = 1 if @page.not_an_integer?
if params[:activity] == 'mine'
events_dataset = current_site.latest_events(@page, 10)
events_dataset = current_site.latest_events(@page)
elsif params[:event_id]
event = Event.select(:id).where(id: params[:event_id]).first
not_found if event.nil?
not_found if event.is_deleted
events_dataset = Event.where(id: params[:event_id]).paginate(1, 1)
elsif params[:activity] == 'global'
events_dataset = Event.global_dataset @page
else
events_dataset = current_site.news_feed(@page, 10)
events_dataset = current_site.news_feed(@page)
end
@pagination_dataset = events_dataset
@ -62,6 +56,7 @@ get '/?' do
@changed_count ||= 0
=begin
if SimpleCache.expired?(:blog_feed_html)
@blog_feed_html = ''
@ -79,6 +74,18 @@ get '/?' do
else
@blog_feed_html = SimpleCache.get :blog_feed_html
end
=end
@blog_feed_html = 'The latest news on Neocities can be found on our blog.'
if SimpleCache.expired?(:featured_sites)
@featured_sites = Site.order(:score.desc).exclude(is_nsfw: true).exclude(is_deleted: true).limit(1000).all.sample(12).collect {|s| {screenshot_url: s.screenshot_url('index.html', '540x405'), uri: s.uri, title: s.title}}
SimpleCache.store :featured_sites, @featured_sites, 1.hour
else
@featured_sites = SimpleCache.get :featured_sites
end
@create_disabled = false
erb :index, layout: :index_layout
end
@ -120,15 +127,6 @@ get '/legal/?' do
erb :'legal'
end
get '/permanent-web' do
redirect '/distributed-web'
end
get '/distributed-web' do
@title = 'The Distributed Web'
erb :'distributed_web'
end
get '/thankyou' do
@title = 'Thank you!'
erb :'thankyou'

View file

@ -13,21 +13,23 @@ post '/send_password_reset' do
sites = Site.get_recovery_sites_with_email params[:email]
if sites.length > 0
token = SecureRandom.uuid.gsub('-', '')
token = SecureRandom.uuid.gsub('-', '')+'-'+Time.now.to_i.to_s
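# The trailing unix timestamp lets /password_reset_confirm reject tokens older than Site::PASSWORD_RESET_EXPIRATION_TIME.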
sites.each do |site|
next unless site.parent?
site.password_reset_token = token
site.save_changes validate: false
body = <<-EOT
Hello! This is the Neocities cat, and I have received a password reset request for your e-mail address.
Hello! This is Penelope the Neocities cat, and I have received a password reset request for your e-mail address.
Go to this URL to reset your password: https://neocities.org/password_reset_confirm?username=#{Rack::Utils.escape(site.username)}&token=#{token}
Go to this URL to reset your password: https://neocities.org/password_reset_confirm?username=#{Rack::Utils.escape(site.username)}&token=#{Rack::Utils.escape(token)}
This link will expire in 24 hours.
If you didn't request this password reset, you can ignore it. Or hide under a bed. Or take a nap. Your call.
Meow,
the Neocities Cat
Penelope
EOT
body.strip!
@ -42,7 +44,7 @@ the Neocities Cat
end
end
flash[:success] = 'If your email was valid (and used by a site), the Neocities Cat will send an e-mail to your account with password reset instructions.'
flash[:success] = "We sent an e-mail with password reset instructions. Check your spam folder if you don't see it in your inbox."
redirect '/'
end
@ -61,7 +63,20 @@ get '/password_reset_confirm' do
redirect '/'
end
reset_site.password_reset_token = nil
timestamp = Time.at(reset_site.password_reset_token.split('-').last.to_i)
if Time.now.to_i - timestamp.to_i > Site::PASSWORD_RESET_EXPIRATION_TIME
flash[:error] = 'Token has expired.'
redirect '/'
end
if reset_site.is_deleted
unless reset_site.undelete!
flash[:error] = "Sorry, we cannot restore this account."
redirect '/'
end
end
reset_site.password_reset_confirmed = true
reset_site.save_changes

View file

@ -1,3 +1,6 @@
require 'socket'
require 'ipaddr'
get '/settings/?' do
require_login
@site = parent_site
@ -15,11 +18,19 @@ def require_ownership_for_settings
end
end
get '/settings/invoices/?' do
require_login
@title = 'Invoices'
@invoices = parent_site.stripe_customer_id ? Stripe::Invoice.list(customer: parent_site.stripe_customer_id) : []
erb :'settings/invoices'
end
get '/settings/:username/?' do |username|
# This is for the email_unsubscribe below
pass if Site.select(:id).where(username: username).first.nil?
require_login
require_ownership_for_settings
@title = "Site settings for #{username}"
erb :'settings/site'
end
@ -53,8 +64,7 @@ post '/settings/:username/profile' do
@site.update(
profile_comments_enabled: params[:site][:profile_comments_enabled],
profile_enabled: params[:site][:profile_enabled],
ipfs_archiving_enabled: params[:site][:ipfs_archiving_enabled]
profile_enabled: params[:site][:profile_enabled]
)
flash[:success] = 'Profile settings changed.'
redirect "/settings/#{@site.username}#profile"
@ -89,8 +99,8 @@ post '/settings/:username/change_name' do
}
old_site.delete_all_thumbnails_and_screenshots
old_site.delete_all_cache
@site.delete_all_cache
old_site.purge_all_cache
@site.purge_all_cache
@site.regenerate_thumbnails_and_screenshots
flash[:success] = "Site/user name has been changed. You will need to use this name to login, <b>don't forget it!</b>"
@ -144,17 +154,19 @@ post '/settings/:username/custom_domain' do
end
begin
Socket.gethostbyname @site.values[:domain]
rescue SocketError => e
if e.message =~ /name or service not known/i
flash[:error] = 'Domain needs to be valid and already registered.'
addr = IPAddr.new @site.values[:domain]
if addr.ipv4? || addr.ipv6?
flash[:error] = 'IP addresses are not allowed. Please enter a valid domain name.'
redirect "/settings/#{@site.username}#custom_domain"
elsif e.message =~ /No address associated with hostname/i
#flash[:error] = "The domain isn't setup to use Neocities yet, cannot add. Please make the A and CNAME record changes where you registered your domain."
#redirect "/settings/#{@site.username}#custom_domain"
else
raise e
end
rescue IPAddr::InvalidAddressError
end
begin
Socket.gethostbyname @site.values[:domain]
rescue SocketError, ResolutionError => e
flash[:error] = "The domain isn't setup to use Neocities yet, cannot add. Please make the A and CNAME record changes where you registered your domain."
redirect "/settings/#{@site.username}#custom_domain"
end
if @site.valid?
@ -174,15 +186,37 @@ post '/settings/:username/custom_domain' do
end
end
post '/settings/:username/bluesky_set_did' do
require_login
require_ownership_for_settings
# TODO: standards-based validation
if params[:did].length > 50
flash[:error] = 'DID provided was too long'
elsif !params[:did].match(/^did:plc:([a-z0-9]+)$/)
flash[:error] = 'DID was invalid'
else
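# Write the DID to .well-known/atproto-did and clear any stale _atproto TXT record in the DNS proxy, so the handle verifies via the well-known file.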
tmpfile = Tempfile.new 'atproto-did'
tmpfile.write params[:did]
tmpfile.close
@site.store_files [{filename: '.well-known/atproto-did', tempfile: tmpfile}]
$redis_proxy.hdel "dns-_atproto.#{@site.username}.neocities.org", 'TXT'
flash[:success] = 'DID set! You can now verify the handle on the Bluesky app.'
end
redirect "/settings/#{@site.username}#bluesky"
end
post '/settings/:username/generate_api_key' do
require_login
require_ownership_for_settings
is_new = current_site.api_key.nil?
current_site.generate_api_key!
is_new = @site.api_key.nil?
@site.generate_api_key!
msg = is_new ? "New API key has been generated." : "API key has been regenerated."
flash[:success] = msg
redirect "/settings/#{current_site.username}#api_key"
redirect "/settings/#{@site.username}#api_key"
end
post '/settings/change_password' do
@ -275,6 +309,22 @@ post '/settings/change_email_notification' do
redirect '/settings#email'
end
post '/settings/change_editor_settings' do
require_login
owner = current_site.owner
owner.editor_autocomplete_enabled = params[:editor_autocomplete_enabled]
owner.editor_font_size = params[:editor_font_size]
owner.editor_keyboard_mode = params[:editor_keyboard_mode]
owner.editor_tab_width = params[:editor_tab_width]
owner.editor_help_tooltips = params[:editor_help_tooltips]
owner.save_changes validate: false
@filename = params[:path]
redirect '/site_files/text_editor?filename=' + Rack::Utils.escape(@filename)
end
post '/settings/create_child' do
require_login
@ -323,10 +373,12 @@ post '/settings/update_card' do
begin
customer.sources.create source: params[:stripe_token]
rescue Stripe::InvalidRequestError => e
rescue Stripe::InvalidRequestError, Stripe::CardError => e
if e.message.match /cannot use a.+token more than once/
flash[:error] = 'Card is already being used.'
redirect '/settings#billing'
elsif e.message.match /Your card was declined/
flash[:error] = 'The card was declined. Please contact your bank.'
else
raise e
end

View file

@ -78,7 +78,3 @@ post '/signout' do
signout
redirect '/'
end
def signout
session[:id] = nil
end

View file

@ -1,13 +1,14 @@
get '/site/:username.rss' do |username|
site = Site[username: username]
halt 404 if site.nil? || (current_site && site.is_blocking?(current_site))
content_type :xml
site.to_rss.to_xml
site.to_rss
end
get '/site/:username/?' do |username|
site = Site[username: username]
# TODO: There should probably be a "this site was deleted" page.
not_found if site.nil? || site.is_banned || site.is_deleted
not_found if site.nil? || site.is_banned || site.is_deleted || (current_site && site.is_blocking?(current_site))
redirect '/' if site.is_education
@ -16,16 +17,19 @@ get '/site/:username/?' do |username|
@title = site.title
@page = params[:page]
@page = @page.to_i
@page = 1 if @page == 0
@page = 1 if @page.not_an_integer?
if params[:event_id]
not_found unless params[:event_id].is_integer?
event = Event.select(:id).where(id: params[:event_id]).first
not_found if params[:event_id].not_an_integer?
event = Event.where(id: params[:event_id]).exclude(is_deleted: true).first
not_found if event.nil?
event_site = event.site
event_actioning_site = event.actioning_site
not_found if current_site && event_site && event_site.is_blocking?(current_site)
not_found if current_site && event_actioning_site && event_actioning_site.is_blocking?(current_site)
events_dataset = Event.where(id: params[:event_id]).paginate(1, 1)
else
events_dataset = site.latest_events(@page, 10)
events_dataset = site.latest_events(@page, current_site)
end
@page_count = events_dataset.page_count || 1
@ -37,19 +41,11 @@ get '/site/:username/?' do |username|
erb :'site', locals: {site: site, is_current_site: site == current_site}
end
get '/site/:username/archives' do
@site = Site[username: params[:username]]
not_found if @site.nil? || @site.is_banned || @site.is_deleted || !@site.ipfs_archiving_enabled
@title = "Site archives for #{@site.title}"
@archives = @site.archives_dataset.limit(300).order(:updated_at.desc).all
erb :'site/archives'
end
MAX_STAT_POINTS = 30
get '/site/:username/stats' do
@default_stat_points = 7
@site = Site[username: params[:username]]
not_found if @site.nil? || @site.is_banned || @site.is_deleted
not_found if @site.nil? || @site.is_banned || @site.is_deleted || (current_site && @site.is_blocking?(current_site))
@title = "Site stats for #{@site.host}"
@ -90,7 +86,7 @@ get '/site/:username/stats' do
if @site.supporter?
unless params[:days].to_s == 'sincethebigbang'
if params[:days] && params[:days].to_i != 0
unless params[:days].not_an_integer?
stats_dataset = stats_dataset.limit params[:days]
else
params[:days] = @default_stat_points
@ -116,9 +112,7 @@ get '/site/:username/stats' do
end
if stats.length > MAX_STAT_POINTS
puts stats.length
stats = stats.select.with_index {|a, i| (i % (stats.length / MAX_STAT_POINTS.to_f).round) == 0}
puts stats.length
end
@stats[:stat_days] = stats
@ -137,16 +131,22 @@ end
get '/site/:username/follows' do |username|
@title = "Sites #{username} follows"
@site = Site[username: username]
not_found if @site.nil? || @site.is_banned || @site.is_deleted
@sites = @site.followings.collect {|f| f.site}
not_found if @site.nil? || @site.is_deleted || (current_site && (@site.is_blocking?(current_site) || current_site.is_blocking?(@site)))
params[:page] ||= "1"
@pagination_dataset = @site.followings_dataset.paginate(params[:page].to_i, Site::FOLLOW_PAGINATION_LIMIT)
erb :'site/follows'
end
get '/site/:username/followers' do |username|
@title = "Sites that follow #{username}"
@site = Site[username: username]
not_found if @site.nil? || @site.is_banned || @site.is_deleted
@sites = @site.follows.collect {|f| f.actioning_site}
not_found if @site.nil? || @site.is_deleted || (current_site && (@site.is_blocking?(current_site) || current_site.is_blocking?(@site)))
params[:page] ||= "1"
@pagination_dataset = @site.follows_dataset.paginate(params[:page].to_i, Site::FOLLOW_PAGINATION_LIMIT)
erb :'site/followers'
end
@ -155,6 +155,8 @@ post '/site/:username/comment' do |username|
site = Site[username: username]
redirect request.referer if current_site && (site.is_blocking?(current_site) || current_site.is_blocking?(site))
last_comment = site.profile_comments_dataset.order(:created_at.desc).first
if last_comment && last_comment.message == params[:message] && last_comment.created_at > 2.hours.ago
@ -183,6 +185,7 @@ post '/site/:site_id/toggle_follow' do |site_id|
require_login
content_type :json
site = Site[id: site_id]
return 403 if site.is_blocking?(current_site)
{result: (current_site.toggle_follow(site) ? 'followed' : 'unfollowed')}.to_json
end
@ -283,3 +286,102 @@ post '/site/:username/block' do |username|
redirect request.referer
end
end
get '/site/:username/unblock' do |username|
require_login
site = Site[username: username]
if site.nil? || current_site.id == site.id
redirect request.referer
end
current_site.unblock! site
redirect request.referer
end
get '/site/:username/confirm_phone' do
require_login
redirect '/' unless current_site.phone_verification_needed?
@title = 'Verify your Phone Number'
erb :'site/confirm_phone'
end
def restart_phone_verification
current_site.phone_verification_sent_at = nil
current_site.phone_verification_sid = nil
current_site.save_changes validate: false
redirect "/site/#{current_site.username}/confirm_phone"
end
post '/site/:username/confirm_phone' do
require_login
redirect '/' unless current_site.phone_verification_needed?
if params[:phone_intl]
phone = Phonelib.parse params[:phone_intl]
if !phone.valid?
flash[:error] = "Invalid phone number, please try again."
redirect "/site/#{current_site.username}/confirm_phone"
end
if phone.types.include?(:premium_rate) || phone.types.include?(:shared_cost)
flash[:error] = 'Neocities does not support this type of number, please use another number.'
redirect "/site/#{current_site.username}/confirm_phone"
end
current_site.phone_verification_sent_at = Time.now
current_site.phone_verification_attempts += 1
if current_site.phone_verification_attempts > Site::PHONE_VERIFICATION_LOCKOUT_ATTEMPTS
flash[:error] = 'You have exceeded the number of phone verification attempts allowed.'
redirect "/site/#{current_site.username}/confirm_phone"
end
current_site.save_changes validate: false
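# Ask Twilio Verify to send the SMS code; the verification SID is stored so the check below can be matched to this attempt.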
verification = $twilio.verify
.v2
.services($config['twilio_service_sid'])
.verifications
.create(to: phone.e164, channel: 'sms')
current_site.phone_verification_sid = verification.sid
current_site.save_changes validate: false
flash[:success] = 'Validation message sent! Check your phone and enter the code below.'
else
restart_phone_verification if current_site.phone_verification_sent_at < Time.now - Site::PHONE_VERIFICATION_EXPIRATION_TIME
minutes_remaining = ((current_site.phone_verification_sent_at - (Time.now - Site::PHONE_VERIFICATION_EXPIRATION_TIME))/60).round
begin
# Check code
vc = $twilio.verify
.v2
.services($config['twilio_service_sid'])
.verification_checks
.create(verification_sid: current_site.phone_verification_sid, code: params[:code])
# puts vc.status (pending if failed, approved if it passed)
if vc.status == 'approved'
current_site.phone_verified = true
current_site.save_changes validate: false
else
flash[:error] = "Code was not correct, please try again. If the phone number you entered was incorrect, you can re-enter the number after #{minutes_remaining} more minutes have passed."
end
rescue Twilio::REST::RestError => e
if e.message =~ /60202/
flash[:error] = "You have exhausted your check attempts. Please try again in #{minutes_remaining} minutes."
elsif e.message =~ /20404/ # Unable to create record
restart_phone_verification
else
raise e
end
end
end
# Will redirect to / automagically if phone was verified
redirect "/site/#{current_site.username}/confirm_phone"
end

View file

@ -14,7 +14,7 @@ post '/site_files/create' do
require_login
@errors = []
filename = params[:pagefilename] || params[:filename]
filename = params[:filename]
filename.gsub!(/[^a-zA-Z0-9_\-.]/, '')
@ -39,7 +39,7 @@ post '/site_files/create' do
extname = File.extname name
unless extname.match /^\.#{Site::EDITABLE_FILE_EXT}/i
unless extname.empty? || extname.match(/^\.#{Site::EDITABLE_FILE_EXT}/i)
flash[:error] = "Must be an editable text file type (#{Site::VALID_EDITABLE_EXTENSIONS.join(', ')})."
redirect redirect_uri
end
@ -52,7 +52,10 @@ post '/site_files/create' do
end
if extname.match(/^\.html|^\.htm/i)
begin
current_site.install_new_html_file name
rescue Sequel::UniqueConstraintViolation
end
else
file_path = current_site.files_path(name)
FileUtils.touch file_path
@ -75,7 +78,9 @@ post '/site_files/create' do
end
def file_upload_response(error=nil)
flash[:error] = error if error
if error
flash[:error] = error
end
if params[:from_button]
query_string = params[:dir] ? "?"+Rack::Utils.build_query(dir: params[:dir]) : ''
@ -88,75 +93,16 @@ end
def require_login_file_upload_ajax
file_upload_response 'You are not signed in!' unless signed_in?
file_upload_response 'Please contact support.' if banned?
end
post '/site_files/upload' do
if params[:filename]
require_login_file_upload_ajax
tempfile = Tempfile.new 'neocities_saving_file'
input = request.body.read
tempfile.set_encoding input.encoding
tempfile.write input
tempfile.close
params[:files] = [{filename: params[:filename], tempfile: tempfile}]
else
require_login
end
@errors = []
if params[:files].nil?
file_upload_response "Uploaded files were not seen by the server, cancelled. We don't know what's causing this yet. Please contact us so we can help fix it. Thanks!"
end
# For migration from original design.. some pages out there won't have the site_id param yet for a while.
site = params[:site_id].nil? ? current_site : Site[params[:site_id]]
unless site.owned_by?(current_site)
file_upload_response 'You do not have permission to save this file. Did you sign in as a different user?'
end
params[:files].each_with_index do |file,i|
dir_name = ''
dir_name = params[:dir] if params[:dir]
unless params[:file_paths].nil? || params[:file_paths].empty? || params[:file_paths].length == 0
file_path = params[:file_paths][i]
unless file_path.nil?
dir_name += '/' + Pathname(file_path).dirname.to_s
end
end
file[:filename] = "#{dir_name}/#{site.scrubbed_path file[:filename]}"
if current_site.file_size_too_large? file[:tempfile].size
file_upload_response "#{Rack::Utils.escape_html file[:filename]} is too large, upload cancelled."
end
if !site.okay_to_upload? file
file_upload_response %{#{Rack::Utils.escape_html file[:filename]}: file type (or content in file) is only supported by <a href="/supporter">supporter accounts</a>. <a href="/site_files/allowed_types">Why We Do This</a>}
end
end
uploaded_size = params[:files].collect {|f| f[:tempfile].size}.inject{|sum,x| sum + x }
if site.file_size_too_large? uploaded_size
file_upload_response "File(s) do not fit in your available free space, upload cancelled."
end
if site.too_many_files? params[:files].length
file_upload_response "Your site has exceeded the maximum number of files, please delete some files first."
end
results = site.store_files params[:files]
file_upload_response
end
post '/site_files/delete' do
require_login
path = HTMLEntities.new.decode params[:filename]
begin
current_site.delete_file path
rescue Sequel::NoExistingObject
# the deed was presumably already done
end
flash[:success] = "Deleted #{Rack::Utils.escape_html params[:filename]}."
dirname = Pathname(path).dirname
@ -171,12 +117,19 @@ post '/site_files/rename' do
new_path = HTMLEntities.new.decode params[:new_path]
site_file = current_site.site_files.select {|s| s.path == path}.first
escaped_path = Rack::Utils.escape_html path
escaped_new_path = Rack::Utils.escape_html new_path
if site_file.nil?
flash[:error] = "File #{escaped_path} does not exist."
else
res = site_file.rename new_path
if res.first == true
flash[:success] = "Renamed #{Rack::Utils.escape_html path} to #{Rack::Utils.escape_html new_path}"
flash[:success] = "Renamed #{escaped_path} to #{escaped_new_path}"
else
flash[:error] = "Failed to rename #{Rack::Utils.escape_html path} to #{Rack::Utils.escape_html new_path}: #{Rack::Utils.escape_html res.last}"
flash[:error] = "Failed to rename #{escaped_path} to #{escaped_new_path}: #{Rack::Utils.escape_html res.last}"
end
end
dirname = Pathname(path).dirname
@ -185,18 +138,36 @@ post '/site_files/rename' do
redirect "/dashboard#{dir_query}"
end
get '/site_files/:username.zip' do |username|
get '/site_files/download' do
require_login
if current_site.too_big_to_download?
flash[:error] = 'Cannot download site as zip as it is too large (or contains too many files)'
redirect '/dashboard'
if !current_site.dl_queued_at.nil? && current_site.dl_queued_at > 1.hour.ago
flash[:error] = 'Site downloads are currently limited to once per hour, please try again later.'
redirect request.referer
end
zipfile_path = current_site.files_zip
content_type 'application/octet-stream'
content_type 'application/zip'
attachment "neocities-#{current_site.username}.zip"
send_file zipfile_path
current_site.dl_queued_at = Time.now
current_site.save_changes validate: false
directory_path = current_site.files_path
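# Build the zip on the fly and stream it to the client; write_stored_file adds entries uncompressed, so no temporary zip file is written to disk.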
stream do |out|
ZipTricks::Streamer.open(out) do |zip|
Dir["#{directory_path}/**/*"].each do |file|
next if File.directory?(file)
zip_path = file.sub("#{directory_path}/", '')
zip.write_stored_file(zip_path) do |file_writer|
File.open(file, 'rb') do |source|
IO.copy_stream(source, file_writer)
end
end
end
end
end
end
get %r{\/site_files\/download\/(.+)} do
@ -213,7 +184,16 @@ get %r{\/site_files\/text_editor\/(.+)} do
dont_browser_cache
@filename = params[:captures].first
redirect '/site_files/text_editor?filename=' + Rack::Utils.escape(@filename)
end
get '/site_files/text_editor' do
require_login
dont_browser_cache
@filename = params[:filename]
extname = File.extname @filename
@ace_mode = case extname
when /htm|html/ then 'html'
when /js/ then 'javascript'
@ -254,3 +234,37 @@ get '/site_files/mount_info' do
@title = 'Site Mount Information'
erb :'site_files/mount_info'
end
post '/site_files/chat' do
require_login
dont_browser_cache
headers 'X-Accel-Buffering' => 'no'
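# 'X-Accel-Buffering: no' tells a fronting nginx proxy not to buffer the response, so streamed tokens reach the browser immediately.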
halt(403) unless parent_site.supporter?
# Ensure the request is treated as a stream
stream do |out|
url = 'https://api.anthropic.com/v1/messages'
headers = {
"anthropic-version" => "2023-06-01",
"anthropic-beta" => "messages-2023-12-15",
"content-type" => "application/json",
"x-api-key" => $config['anthropic_api_key']
}
body = {
model: "claude-3-haiku-20240307",
system: params[:system],
messages: JSON.parse(params[:messages]),
max_tokens: 4096,
temperature: 0.5,
stream: true
}.to_json
res = HTTP.headers(headers).post(url, body: body)
while(buffer = res.body.readpartial)
out << buffer
end
end
end

View file

@ -14,7 +14,6 @@ end
post '/supporter/update' do
require_login
plan_type = 'supporter'
if is_special_upgrade
@ -40,19 +39,29 @@ post '/supporter/update' do
customer.sources.create source: params[:stripe_token]
end
begin
subscription = customer.subscriptions.create plan: plan_type
rescue Stripe::CardError => e
flash[:error] = "Error: #{Rack::Utils.escape_html e.message}"
redirect '/supporter'
end
site.plan_ended = false
site.plan_type = plan_type
site.stripe_subscription_id = subscription.id
site.save_changes validate: false
else
begin
customer = Stripe::Customer.create(
source: params[:stripe_token],
description: "#{site.username} - #{site.id}",
email: site.email,
plan: plan_type
)
rescue Stripe::CardError => e
flash[:error] = "Error: #{Rack::Utils.escape_html e.message} This is likely caused by incorrect information, or an issue with your credit card. Please try again, or contact your bank."
redirect '/supporter'
end
site.stripe_customer_id = customer.id
site.stripe_subscription_id = customer.subscriptions.first.id
@ -74,7 +83,7 @@ post '/supporter/update' do
site.send_email(
subject: "[Neocities] You've become a supporter!",
body: Tilt.new('./views/templates/email_subscription.erb', pretty: true).render(
body: Tilt.new('./views/templates/email/subscription.erb', pretty: true).render(
self, {
username: site.username,
plan_name: Site::PLAN_FEATURES[params[:plan_type].to_sym][:name],
@ -93,6 +102,7 @@ post '/supporter/update' do
end
get '/supporter/thanks' do
@title = 'Supporter Confirmation'
require_login
erb :'supporter/thanks'
end

View file

@ -1,21 +0,0 @@
get '/surf/?' do
not_found # 404 for now
@page = params[:page].to_i || 1
params.delete 'tag' if params[:tag].nil? || params[:tag].strip.empty?
site_dataset = browse_sites_dataset
site_dataset = site_dataset.paginate @page, 1
@page_count = site_dataset.page_count || 1
@site = site_dataset.first
redirect "/browse?#{Rack::Utils.build_query params}" if @site.nil?
@title = "Surf Mode - #{@site.title}"
erb :'surf', layout: false
end
get '/surf/:username' do |username|
not_found # 404 for now
@site = Site.select(:id, :username, :title, :domain, :views, :stripe_customer_id).where(username: username).first
not_found if @site.nil?
@title = @site.title
not_found if @site.nil?
erb :'surf', layout: false
end

View file

@ -24,18 +24,29 @@ end
get '/tutorial/:section/?' do
require_login
not_found unless %w{html}.include?(params[:section])
redirect "/tutorial/#{params[:section]}/1"
end
get '/tutorial/:section/:page/?' do
require_login
@page = params[:page]
not_found if @page.to_i == 0
not_found unless %w{html css js}.include?(params[:section])
not_found unless @page.match?(/\A[1-9]\z|\A10\z/)
not_found unless %w{html}.include?(params[:section])
@section = params[:section]
@title = "#{params[:section].upcase} Tutorial - #{@page}/10"
if @page == '9'
unless csrf_safe?
signout
redirect '/'
end
current_site.tutorial_required = false
current_site.save_changes validate: false
end
erb "tutorial/layout".to_sym
end

View file

@ -143,7 +143,7 @@ def stripe_get_site_from_event(event)
site_where = {username: desc_split.first}
end
if desc_split.last.to_i == 0
if desc_split.last.not_an_integer?
site_where = {username: desc_split.first}
else
site_where = {id: desc_split.last}

View file

@ -16,41 +16,33 @@ end
def require_login
redirect '/' unless signed_in? && current_site
enforce_ban if banned?
signout if deleted?
end
def signed_in?
!session[:id].nil?
return false if current_site.nil?
true
end
def signout
@_site = nil
@_parent_site = nil
session[:id] = nil
end
def current_site
return nil if session[:id].nil?
@_site ||= Site[id: session[:id]]
@_parent_site ||= @_site.parent
if @_site.is_banned || @_site.is_deleted || (@_parent_site && (@_parent_site.is_banned || @_parent_site.is_deleted))
signout
end
@_site
end
def parent_site
return nil if current_site.nil?
current_site.parent? ? current_site : current_site.parent
end
def deleted?
return true if current_site && current_site.is_deleted
false
end
def banned?(ip_check=false)
return true if session[:banned]
return true if current_site && (current_site.is_banned || parent_site.is_banned)
return true if ip_check && Site.banned_ip?(request.ip)
false
end
def enforce_ban
signout
session[:banned] = true
redirect '/'
@_parent_site || current_site
end
def meta_robots(newtag=nil)
@ -104,12 +96,6 @@ def dont_browser_cache
@dont_browser_cache = true
end
def email_not_validated?
return false if current_site && current_site.created_at < Site::EMAIL_VALIDATION_CUTOFF_DATE
current_site && current_site.parent? && !current_site.is_education && !current_site.email_confirmed && !current_site.supporter?
end
def sanitize_comment(text)
Rinku.auto_link Sanitize.fragment(text), :all, 'target="_blank" rel="nofollow"'
end
@ -118,20 +104,32 @@ def flash_display(opts={})
erb :'_flash', layout: false, locals: {opts: opts}
end
def recaptcha_valid?
return true if ENV['RACK_ENV'] == 'test' || ENV['TRAVIS']
return false unless params[:'g-recaptcha-response']
resp = Net::HTTP.get URI(
'https://www.google.com/recaptcha/api/siteverify?'+
Rack::Utils.build_query(
secret: $config['recaptcha_private_key'],
response: params[:'g-recaptcha-response']
)
)
def hcaptcha_valid?
return true if ENV['RACK_ENV'] == 'test' || ENV['CI']
return false unless params[:'h-captcha-response']
if JSON.parse(resp)['success'] == true
resp = HTTP.get('https://hcaptcha.com/siteverify', params: {
secret: $config['hcaptcha_secret_key'],
response: params[:'h-captcha-response']
})
resp = JSON.parse resp
if resp['success'] == true
true
else
false
end
end
JS_ESCAPE_MAP = {"\\" => "\\\\", "</" => '<\/', "\r\n" => '\n', "\n" => '\n', "\r" => '\n', '"' => '\\"', "'" => "\\'", "`" => "\\`", "$" => "\\$"}
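# Escapes strings for safe embedding in inline <script> / JS string literals (same idea as Rails' escape_javascript helper).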
def escape_javascript(javascript)
javascript = javascript.to_s
if javascript.empty?
result = ""
else
result = javascript.gsub(/(\\|<\/|\r\n|\342\200\250|\342\200\251|[\n\r"']|[`]|[$])/u, JS_ESCAPE_MAP)
end
result
end

107
config.ru
View file

@ -1,6 +1,9 @@
require 'rubygems'
require './app.rb'
require 'sidekiq/web'
require 'airbrake/sidekiq'
use Airbrake::Rack::Middleware
map('/') do
use(Rack::Cache,
@ -13,72 +16,92 @@ end
map '/webdav' do
use Rack::Auth::Basic do |username, password|
@site = Site.get_site_from_login username, password
@site = Site.get_site_from_login(username, password)
@site ? true : false
end
run lambda { |env|
if env['REQUEST_METHOD'] == 'PUT'
request_method = env['REQUEST_METHOD']
path = env['PATH_INFO']
tmpfile = Tempfile.new 'davfile', encoding: 'binary'
tmpfile.write env['rack.input'].read
tmpfile.close
if @site.file_size_too_large? tmpfile.size
return [507, {}, ['']]
unless @site.owner.supporter?
return [
402,
{
'Content-Type' => 'application/xml',
'X-Upgrade-Required' => 'https://neocities.org/supporter'
},
[
<<~XML
<?xml version="1.0" encoding="utf-8"?>
<error xmlns="DAV:">
<message>WebDAV access requires a supporter account.</message>
</error>
XML
]
]
end
# if Site.valid_file_type?(filename: path, tempfile: tmpfile)
if @site.okay_to_upload? filename: path, tempfile: tmpfile
@site.store_files [{filename: path, tempfile: tmpfile}]
case request_method
when 'OPTIONS'
return [200, {'Allow' => 'OPTIONS, GET, HEAD, PUT, DELETE, PROPFIND, MKCOL, MOVE', 'DAV' => '1,2'}, ['']]
when 'PUT'
tmpfile = Tempfile.new('davfile', encoding: 'binary')
tmpfile.write(env['rack.input'].read)
tmpfile.close
return [507, {}, ['']] if @site.file_size_too_large?(tmpfile.size)
if @site.okay_to_upload?(filename: path, tempfile: tmpfile)
@site.store_files([{ filename: path, tempfile: tmpfile }])
return [201, {}, ['']]
else
return [415, {}, ['']]
end
end
if env['REQUEST_METHOD'] == 'MKCOL'
@site.create_directory env['PATH_INFO']
when 'MKCOL'
@site.create_directory(path)
return [201, {}, ['']]
end
if env['REQUEST_METHOD'] == 'MOVE'
when 'MOVE'
destination = env['HTTP_DESTINATION'][/\/webdav(.+)$/i, 1]
return [400, {}, ['Bad Request']] unless destination
destination = env['HTTP_DESTINATION'].match(/^.+\/webdav(.+)$/i).captures.first
env['PATH_INFO'] = env['PATH_INFO'][1..env['PATH_INFO'].length] if env['PATH_INFO'][0] == '/'
site_file = @site.site_files.select {|s| s.path == env['PATH_INFO']}.first
res = site_file.rename destination
path.sub!(/^\//, '') # Remove leading slash if present
site_file = @site.site_files.find { |s| s.path == path }
return [404, {}, ['']] unless site_file
site_file.rename(destination)
return [201, {}, ['']]
end
if env['REQUEST_METHOD'] == 'COPY'
return [501, {}, ['']]
end
if env['REQUEST_METHOD'] == 'LOCK'
return [501, {}, ['']]
end
if env['REQUEST_METHOD'] == 'UNLOCK'
return [501, {}, ['']]
end
if env['REQUEST_METHOD'] == 'PROPPATCH'
return [501, {}, ['']]
end
if env['REQUEST_METHOD'] == 'DELETE'
@site.delete_file env['PATH_INFO']
when 'DELETE'
@site.delete_file(path)
return [201, {}, ['']]
else
unless ['PROPFIND', 'GET', 'HEAD'].include? request_method
return [501, {}, ['Not Implemented']]
end
res = DAV4Rack::Handler.new(
env['PATH_INFO'] = "/#{@site.scrubbed_path(path)}" unless path.empty?
# Terrible hack to fix WebDAV for the VSC plugin
if env['CONTENT_LENGTH'] == "0"
env['rack.input'] = StringIO.new('<?xml version="1.0" encoding="utf-8"?>
<propfind xmlns="DAV:"><prop>
<getcontentlength xmlns="DAV:"/>
<getlastmodified xmlns="DAV:"/>
<resourcetype xmlns="DAV:"/>
</prop></propfind>')
env['CONTENT_LENGTH'] = env['rack.input'].length.to_s
end
DAV4Rack::Handler.new(
root: @site.files_path,
root_uri_path: '/webdav'
).call(env)
end
}
end
@ -88,7 +111,7 @@ map '/sidekiq' do
username == $config['sidekiq_user'] && password == $config['sidekiq_pass']
end
use Rack::Session::Cookie, key: 'sidekiq.session', secret: $config['session_secret']
use Rack::Session::Cookie, key: 'sidekiq.session', secret: Base64.strict_decode64($config['session_secret'])
use Rack::Protection::AuthenticityToken
run Sidekiq::Web
end

31
config.yml.ci Normal file
View file

@ -0,0 +1,31 @@
database: 'postgres://postgres:citestpassword@localhost/ci_test'
database_pool: 1
session_secret: 'SSBqdXN0IHdhbnRlZCB0byBzZWUgd2hhdCB5b3UgbG9va2VkIGxpa2UgaW4gYSBkcmVzcywgRGFkZSBNdXJwaHk='
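# Base64-encoded; the Sidekiq web UI mount in config.ru decodes it with Base64.strict_decode64.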
email_unsubscribe_token: "somethingrandomderrrrp"
paypal_api_username: derp
paypal_api_password: ing
paypal_api_signature: tonz
logs_path: "/tmp/neocitiestestlogs"
letsencrypt_key: ./tests/files/letsencrypt.key
letsencrypt_endpoint: https://acme-staging.api.letsencrypt.org/
proxy_ips:
- 10.0.0.1
- 10.0.0.2
education_tag_whitelist:
- mrteacher
stop_forum_spam_api_key: testkey
screenshot_urls:
- http://screenshots:derp@screenshotssite.com
cache_control_ips:
- 1.2.3.4
- 4.5.6.7
hcaptcha_site_key: "10000000-ffff-ffff-ffff-000000000001"
hcaptcha_secret_key: "0x0000000000000000000000000000000000000000"
twilio_account_sid: ACEDERPDERP
twilio_auth_token: derpderpderp
twilio_service_sid: VADERPDERPDERP
minfraud_account_id: 696969420
minfraud_license_key: DERPDERPDERP
google_custom_search_key: herpderp
google_custom_search_cx: herpderp
google_custom_search_query_limit: 69

View file

@ -2,9 +2,9 @@ development:
database: 'postgres://localhost/neocities'
database_pool: 1
redis_url: "redis://localhost"
session_secret: "SECRET GOES HERE"
recaptcha_public_key: "6LeIxAcTAAAAAJcZVRqyHh71UMIEGNQ_MXjiZKhI"
recaptcha_private_key: "6LeIxAcTAAAAAGG-vFI1TnRWxMZNFuojJ4WifJWe"
session_secret: "SSBqdXN0IHdhbnRlZCB0byBzZWUgd2hhdCB5b3UgbG9va2VkIGxpa2UgaW4gYSBkcmVzcywgRGFkZSBNdXJwaHk="
hcaptcha_site_key: "10000000-ffff-ffff-ffff-000000000001"
hcaptcha_secret_key: "0x0000000000000000000000000000000000000000"
sidekiq_user: "ENTER USER HERE"
sidekiq_pass: "ENTER PASS HERE"
stripe_publishable_key: "ENTER KEY HERE"
@ -18,19 +18,25 @@ development:
paypal_api_signature: tonz
letsencrypt_key: ./tests/files/letsencrypt.key
letsencrypt_endpoint: https://acme-staging.api.letsencrypt.org/
minfraud_account_id: 696969420
minfraud_license_key: DERPDERPDERP
proxy_ips:
- 10.0.0.1
- 10.0.0.2
education_tag_whitelist:
- mrteacher
screenshots_url: http://screenshots:derp@127.0.0.1:12345
screenshot_urls:
- http://screenshots:derp@127.0.0.1:12345
stop_forum_spam_api_key: testkey
google_custom_search_key: herpderp
google_custom_search_cx: herpderp
google_custom_search_query_limit: 69
test:
database: 'postgres://localhost/neocities_test'
database_pool: 1
session_secret: "SECRET GOES HERE"
recaptcha_public_key: "ENTER PUBLIC KEY HERE"
recaptcha_private_key: "ENTER PRIVATE KEY HERE"
session_secret: "SSBqdXN0IHdhbnRlZCB0byBzZWUgd2hhdCB5b3UgbG9va2VkIGxpa2UgaW4gYSBkcmVzcywgRGFkZSBNdXJwaHk="
hcaptcha_site_key: "10000000-ffff-ffff-ffff-000000000001"
hcaptcha_secret_key: "0x0000000000000000000000000000000000000000"
sidekiq_user: "ENTER USER HERE"
sidekiq_pass: "ENTER PASS HERE"
stripe_publishable_key: "ENTER KEY HERE"
@ -49,7 +55,16 @@ test:
education_tag_whitelist:
- mrteacher
stop_forum_spam_api_key: testkey
screenshots_url: http://screenshots:derp@screenshotssite.com
screenshot_urls:
- http://screenshots:derp@screenshotssite.com
cache_control_ips:
- 1.2.3.4
- 4.5.6.7
twilio_account_sid: ACEDERPDERP
twilio_auth_token: derpderpderp
twilio_service_sid: VADERPDERPDERP
minfraud_account_id: 696969420
minfraud_license_key: DERPDERPDERP
google_custom_search_key: herpderp
google_custom_search_cx: herpderp
google_custom_search_query_limit: 69

View file

@ -1,23 +0,0 @@
database: 'postgres://postgres@localhost/travis_ci_test'
database_pool: 1
session_secret: 's3cr3t'
recaptcha_public_key: '1234'
recaptcha_private_key: '5678'
email_unsubscribe_token: "somethingrandomderrrrp"
paypal_api_username: derp
paypal_api_password: ing
paypal_api_signature: tonz
logs_path: "/tmp/neocitiestestlogs"
letsencrypt_key: ./tests/files/letsencrypt.key
letsencrypt_endpoint: https://acme-staging.api.letsencrypt.org/
proxy_ips:
- 10.0.0.1
- 10.0.0.2
education_tag_whitelist:
- mrteacher
stop_forum_spam_api_key: testkey
screenshot_urls:
- http://screenshots:derp@screenshotssite.com
cache_control_ips:
- 1.2.3.4
- 4.5.6.7

View file

@ -1,3 +1,5 @@
# frozen_string_literal: true
RubyVM::YJIT.enable
ENV['RACK_ENV'] ||= 'development'
ENV['TZ'] = 'UTC'
DIR_ROOT = File.expand_path File.dirname(__FILE__)
@ -11,14 +13,21 @@ require 'logger'
Bundler.require
Bundler.require :development if ENV['RACK_ENV'] == 'development'
require 'tilt/erubis'
require 'active_support/core_ext/integer/time'
require 'tilt/erubi'
require 'active_support'
require 'active_support/time'
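# Ruby 3.2 removed File.exists?; keep a shim for gems that still call it.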
class File
def self.exists?(val)
self.exist?(val)
end
end
Dir['./ext/**/*.rb'].each {|f| require f}
# :nocov:
if ENV['TRAVIS']
$config = YAML.load_file File.join(DIR_ROOT, 'config.yml.travis')
if ENV['CI']
$config = YAML.load_file File.join(DIR_ROOT, 'config.yml.ci')
else
begin
$config = YAML.load_file(File.join(DIR_ROOT, 'config.yml'))[ENV['RACK_ENV']]
@ -33,6 +42,7 @@ DB = Sequel.connect $config['database'], sslmode: 'disable', max_connections: $c
DB.extension :pagination
DB.extension :auto_literal_strings
Sequel.split_symbols = true
Sidekiq.strict_args!(false)
require 'will_paginate/sequel'
@ -47,9 +57,13 @@ end
=end
# :nocov:
Sidekiq::Logging.logger = nil unless ENV['RACK_ENV'] == 'production'
unless ENV['RACK_ENV'] == 'production'
Sidekiq.configure_server do |config|
config.logger = nil
end
end
sidekiq_redis_config = {namespace: 'neocitiesworker'}
sidekiq_redis_config = {}
sidekiq_redis_config[:url] = $config['sidekiq_url'] if $config['sidekiq_url']
# :nocov:
@ -114,14 +128,6 @@ Dir.glob('workers/*.rb').each {|w| require File.join(DIR_ROOT, "/#{w}") }
DB.loggers << Logger.new(STDOUT) if ENV['RACK_ENV'] == 'development'
Mail.defaults do
#options = { :address => "smtp.gmail.com",
# :port => 587,
# :domain => 'your.host.name',
# :user_name => '<username>',
# :password => '<password>',
# :authentication => 'plain',
# :enable_starttls_auto => true }
options = {}
delivery_method :sendmail, options
end
@ -163,3 +169,30 @@ $gandi = Gandi::Session.new $config['gandi_api_key'], gandi_opts
$image_optim = ImageOptim.new pngout: false, svgo: false
Money.locale_backend = nil
Money.default_currency = Money::Currency.new("USD")
Money.rounding_mode = BigDecimal::ROUND_HALF_UP
$twilio = Twilio::REST::Client.new $config['twilio_account_sid'], $config['twilio_auth_token']
Minfraud.configure do |c|
c.account_id = $config['minfraud_account_id']
c.license_key = $config['minfraud_license_key']
c.enable_validation = true
end
Airbrake.configure do |c|
c.project_id = $config['airbrake_project_id']
c.project_key = $config['airbrake_project_key']
end
Airbrake.add_filter do |notice|
if notice[:params][:password]
# Filter out password.
notice[:params][:password] = '[Filtered]'
end
notice.ignore! if notice.stash[:exception].is_a?(Sinatra::NotFound)
end
Airbrake.add_filter Airbrake::Sidekiq::RetryableJobsFilter.new

View file

@ -6,4 +6,8 @@ class NilClass
def blank?
true
end
def not_an_integer?
true
end
end

View file

@ -1,49 +0,0 @@
class BitcoinValidator
class << self
def address_version
"00"
end
def p2sh_version
"05"
end
def valid_address?(address)
hex = decode_base58(address) rescue nil
return false unless hex && hex.bytesize == 50
return false unless [address_version, p2sh_version].include?(hex[0...2])
base58_checksum?(address)
end
def decode_base58(base58_val)
s = base58_to_int(base58_val).to_s(16); s = (s.bytesize.odd? ? '0'+s : s)
s = '' if s == '00'
leading_zero_bytes = (base58_val.match(/^([1]+)/) ? $1 : '').size
s = ("00"*leading_zero_bytes) + s if leading_zero_bytes > 0
s
end
def base58_checksum?(base58)
hex = decode_base58(base58) rescue nil
return false unless hex
checksum( hex[0...42] ) == hex[-8..-1]
end
def checksum(hex)
b = [hex].pack("H*") # unpack hex
Digest::SHA256.hexdigest( Digest::SHA256.digest(b) )[0...8]
end
def base58_to_int(base58_val)
alpha = "123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz"
int_val, base = 0, alpha.size
base58_val.reverse.each_char.with_index do |char,index|
raise ArgumentError, 'Value not a valid Base58 String.' unless char_index = alpha.index(char)
int_val += char_index*(base**index)
end
int_val
end
end
end

View file

@ -76,4 +76,8 @@ class Numeric
def to_space_pretty
to_bytes_pretty
end
def not_an_integer?
!self.integer?
end
end

View file

@ -0,0 +1,5 @@
class Sinatra::IndifferentHash
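# A param like ?page[foo]=1 parses as a hash; treat it as not an integer instead of raising.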
def not_an_integer?
true
end
end

View file

@ -12,12 +12,15 @@ class String
gsub /^#{scan(/^\s*/).min_by{|l|l.length}}/, ""
end
def is_integer?
true if Integer(self) rescue false
end
def blank?
return true if self == ''
false
end
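# Counterparts are defined on Numeric, NilClass and Sinatra::IndifferentHash, so routes can call params[:page].not_an_integer? without type checks.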
def not_an_integer?
Integer(self)
false
rescue ArgumentError
true
end
end

View file

@ -1,6 +1,6 @@
Sequel.migration do
change do
alter_table(:events) { add_index :created_at }
#alter_table(:events) { add_index :created_at }
alter_table(:sites) { add_index :updated_at }
alter_table(:comment_likes) { add_index :comment_id }
alter_table(:comment_likes) { add_index :actioning_site_id }

View file

@ -1,13 +1,13 @@
Sequel.migration do
up {
DB['create index stat_referrers_hash_multi on stat_referrers (site_id, md5(url))'].first
DB.add_index :stat_locations, :site_id
DB.add_index :stat_paths, :site_id
#DB.add_index :stat_locations, :site_id
#DB.add_index :stat_paths, :site_id
}
down {
DB['drop index stat_referrers_hash_multi'].first
DB.drop_index :stat_locations, :site_id
DB.drop_index :stat_paths, :site_id
#DB.drop_index :stat_locations, :site_id
#DB.drop_index :stat_paths, :site_id
}
end

View file

@ -1,9 +1,9 @@
Sequel.migration do
up {
DB.add_index :stat_referrers, :site_id
#DB.add_index :stat_referrers, :site_id
}
down {
DB.drop_index :stat_referrers, :site_id
#DB.drop_index :stat_referrers, :site_id
}
end

View file

@ -1,13 +1,13 @@
Sequel.migration do
up {
%i{stat_referrers stat_locations stat_paths}.each do |t|
DB.add_index t, :created_at
end
#%i{stat_referrers stat_locations stat_paths}.each do |t|
# DB.add_index t, :created_at
#end
}
down {
%i{stat_referrers stat_locations stat_paths}.each do |t|
DB.drop_index t, :created_at
end
#%i{stat_referrers stat_locations stat_paths}.each do |t|
# DB.drop_index t, :created_at
#end
}
end

View file

@ -1,9 +1,9 @@
Sequel.migration do
up {
DB.add_index :sites, :parent_site_id
#DB.add_index :sites, :parent_site_id
}
down {
DB.drop_index :sites, :parent_site_id
#DB.drop_index :sites, :parent_site_id
}
end

View file

@ -1,9 +1,9 @@
Sequel.migration do
up {
DB.add_index :sites, :featured_at
#DB.add_index :sites, :featured_at
}
down {
DB.drop_index :sites, :featured_at
#DB.drop_index :sites, :featured_at
}
end

View file

@ -0,0 +1,9 @@
Sequel.migration do
up {
DB.add_index :sites, :api_key
}
down {
DB.drop_index :sites, :api_key
}
end

View file

@ -0,0 +1,9 @@
Sequel.migration do
up {
DB.add_index :sites, :domain
}
down {
DB.drop_index :sites, :domain
}
end

View file

@ -0,0 +1,9 @@
Sequel.migration do
up {
DB.add_column :sites, :dl_queued_at, Time
}
down {
DB.drop_column :sites, :dl_queued_at
}
end

View file

@ -0,0 +1,15 @@
Sequel.migration do
up {
DB.add_column :sites, :phone_verification_required, :boolean, default: false
DB.add_column :sites, :phone_verified, :boolean, default: false
DB.add_column :sites, :phone_verification_sid, :text
DB.add_column :sites, :phone_verification_sent_at, :time
}
down {
DB.drop_column :sites, :phone_verification_required
DB.drop_column :sites, :phone_verified
DB.drop_column :sites, :phone_verification_sid
DB.drop_column :sites, :phone_verification_sent_at
}
end

View file

@ -0,0 +1,11 @@
Sequel.migration do
up {
DB.drop_column :sites, :phone_verification_sent_at
DB.add_column :sites, :phone_verification_sent_at, Time
}
down {
DB.drop_column :sites, :phone_verification_sent_at
DB.add_column :sites, :phone_verification_sent_at, :time
}
end

View file

@ -0,0 +1,9 @@
Sequel.migration do
up {
DB.add_column :sites, :phone_verification_attempts, :integer, default: 0
}
down {
DB.drop_column :sites, :phone_verification_attempts
}
end

View file

@ -0,0 +1,17 @@
Sequel.migration do
up {
DB.drop_table :archives
DB.drop_column :sites, :ipfs_archiving_enabled
}
down {
DB.create_table! :archives do
Integer :site_id, index: true
String :ipfs_hash
DateTime :updated_at, index: true
unique [:site_id, :ipfs_hash]
end
DB.add_column :sites, :ipfs_archiving_enabled, :boolean, default: false
}
end

View file

@ -0,0 +1,9 @@
Sequel.migration do
up {
DB.add_index :events, [:created_at, :site_id, :site_change_id, :is_deleted], name: :events_rss_index, order: {created_at: :desc}
}
down {
DB.drop_index :events, :rss
}
end

View file

@ -0,0 +1,9 @@
Sequel.migration do
up {
DB.add_column :sites, :tutorial_required, :boolean, default: false
}
down {
DB.drop_column :sites, :tutorial_required
}
end

View file

@ -0,0 +1,15 @@
Sequel.migration do
up {
alter_table(:sites) do
set_column_type :hits, :bigint
set_column_type :views, :bigint
end
}
down {
alter_table(:sites) do
set_column_type :hits, Integer
set_column_type :views, Integer
end
}
end

View file

@ -0,0 +1,13 @@
Sequel.migration do
up {
alter_table(:sites) do
set_column_type :score, :real
end
}
down {
alter_table(:sites) do
set_column_type :score, :decimal
end
}
end

View file

@ -0,0 +1,13 @@
Sequel.migration do
up {
alter_table(:tags) do
add_unique_constraint :name
end
}
down {
alter_table(:tags) do
drop_constraint :tags_name_key
end
}
end

View file

@ -0,0 +1,15 @@
Sequel.migration do
up {
DB.add_column :sites, :autocomplete_enabled, :boolean, default: false
DB.add_column :sites, :editor_font_size, :int, default: 14
DB.add_column :sites, :keyboard_mode, :int, default: 0
DB.add_column :sites, :tab_width, :int, default: 2
}
down {
DB.drop_column :sites, :autocomplete_enabled
DB.drop_column :sites, :editor_font_size
DB.drop_column :sites, :keyboard_mode
DB.drop_column :sites, :tab_width
}
end

View file

@ -0,0 +1,16 @@
Sequel.migration do
up {
DB.rename_column :sites, :autocomplete_enabled, :editor_autocomplete_enabled
DB.rename_column :sites, :keyboard_mode, :editor_keyboard_mode
DB.rename_column :sites, :tab_width, :editor_tab_width
DB.drop_column :sites, :editor_keyboard_mode
DB.add_column :sites, :editor_keyboard_mode, String, size: 10
}
down {
DB.rename_column :sites, :editor_autocomplete_enabled, :autocomplete_enabled
DB.rename_column :sites, :editor_tab_width, :tab_width
DB.drop_column :sites, :editor_keyboard_mode
DB.add_column :sites, :keyboard_mode, :int, default: 0
}
end

View file

@ -0,0 +1,9 @@
Sequel.migration do
up {
DB.add_column :sites, :editor_help_tooltips, :boolean, default: false
}
down {
DB.drop_column :sites, :editor_help_tooltips
}
end

View file

@ -1,38 +0,0 @@
require 'base32'
class Archive < Sequel::Model
many_to_one :site
set_primary_key [:site_id, :ipfs_hash]
unrestrict_primary_key
MAXIMUM_ARCHIVES_PER_SITE = 10
ARCHIVE_WAIT_TIME = 1.minute
def before_destroy
unpin
super
end
def unpin
# Not ideal. An SoA version is in progress.
if ENV['RACK_ENV'] == 'production' && $config['ipfs_ssh_host'] && $config['ipfs_ssh_user']
rbox = Rye::Box.new $config['ipfs_ssh_host'], :user => $config['ipfs_ssh_user']
rbox.disable_safe_mode
begin
response = rbox.execute "ipfs pin rm #{ipfs_hash}"
output_array = response
rescue => e
return true if e.message =~ /indirect pins cannot be removed directly/
ensure
rbox.disconnect
end
else
line = Terrapin::CommandLine.new('ipfs', 'pin rm :ipfs_hash')
response = line.run ipfs_hash: ipfs_hash
output_array = response.to_s.split("\n")
end
end
def url
"https://#{ipfs_hash}.ipfs.neocitiesops.net"
end
end

View file

@ -1,3 +1,4 @@
# frozen_string_literal: true
class Event < Sequel::Model
include Sequel::ParanoidDelete
@ -12,55 +13,32 @@ class Event < Sequel::Model
many_to_one :site
many_to_one :actioning_site, key: :actioning_site_id, class: :Site
DEFAULT_GLOBAL_LIMIT = 300
GLOBAL_VIEWS_MINIMUM = 5
GLOBAL_VIEWS_SITE_CHANGE_MINIMUM = 3_000
PAGINATION_LENGTH = 10
GLOBAL_PAGINATION_LENGTH = 20
GLOBAL_SCORE_LIMIT = 2
ACTIVITY_TAG_SCORE_LIMIT = 0.2
def undeleted_comments_count
comments_dataset.exclude(is_deleted: true).count
end
def undeleted_comments
comments_dataset.exclude(is_deleted: true).order(:created_at).all
def undeleted_comments(exclude_ids=nil)
ds = comments_dataset.exclude(is_deleted: true).order(:created_at)
if exclude_ids
ds = ds.exclude actioning_site_id: exclude_ids
end
ds.all
end
def self.news_feed_default_dataset
if SimpleCache.expired?(:excluded_actioning_site_ids)
res = DB[%{select distinct(actioning_site_id) from events join sites on actioning_site_id=sites.id where sites.is_banned='t' or sites.is_nsfw='t' or sites.is_deleted='t'}].all.collect {|r| r[:actioning_site_id]}
excluded_actioning_site_ids = SimpleCache.store :excluded_actioning_site_ids, res, 10.minutes
else
excluded_actioning_site_ids = SimpleCache.get :excluded_actioning_site_ids
end
ds = select_all(:events).
order(:created_at.desc).
join_table(:inner, :sites, id: :site_id).
exclude(Sequel.qualify(:sites, :is_deleted) => true).
exclude(Sequel.qualify(:events, :is_deleted) => true).
exclude(is_banned: true)
unless excluded_actioning_site_ids.empty?
return ds.where("actioning_site_id is null or actioning_site_id not in ?", excluded_actioning_site_ids)
end
ds
end
def self.global_dataset(current_page=1, limit=DEFAULT_GLOBAL_LIMIT)
news_feed_default_dataset.
paginate(current_page, 100).
exclude(is_nsfw: true).
exclude(is_crashing: true).
where{views > GLOBAL_VIEWS_MINIMUM}.
where(site_change_id: nil)
end
def self.global_site_changes_dataset
news_feed_default_dataset.
where{views > GLOBAL_VIEWS_SITE_CHANGE_MINIMUM}.
exclude(is_nsfw: true).
exclude(is_crashing: true).
exclude(site_change_id: nil)
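# Join both the event's site and the acting site so events touching a deleted site on either side are filtered out; follow events (follow_id set) are excluded here.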
select(:events.*).
left_join(:sites, id: :site_id).
left_join(Sequel[:sites].as(:actioning_sites), id: :events__actioning_site_id).
exclude(sites__is_deleted: true).
exclude(actioning_sites__is_deleted: true).
exclude(events__is_deleted: true).
where(follow_id: nil).
order(:events__created_at.desc)
end
def created_by?(site)
@ -105,4 +83,12 @@ class Event < Sequel::Model
true
end
end
def name
return 'follow' if follow_id
return 'tip' if tip_id
return 'tag' if tag_id
return 'site change' if site_change_id
return 'comment' if profile_comment_id
end
end

File diff suppressed because it is too large

View file

@ -1,3 +1,4 @@
# frozen_string_literal: true
class SiteChange < Sequel::Model
NEW_CHANGE_TIMEOUT = 3600 * 24 # 24 hours
many_to_one :site
@ -5,8 +6,8 @@ class SiteChange < Sequel::Model
one_to_one :site_change
one_to_many :site_change_files
def site_change_filenames(limit=4)
site_change_files_dataset.select(:filename).limit(limit).all.collect {|f| f.filename}.sort_by {|f| f.match('html') ? 0 : 1}
def site_change_filenames(limit=6)
site_change_files_dataset.select(:filename).limit(limit).order(:created_at.desc).all.collect {|f| f.filename}.sort_by {|f| f.match('html') ? 0 : 1}
end
def self.record(site, filename)

View file

@ -1,12 +1,26 @@
# frozen_string_literal: true
require 'sanitize'
class SiteFile < Sequel::Model
CLASSIFIER_LIMIT = 1_000_000.freeze
CLASSIFIER_WORD_LIMIT = 25.freeze
CLASSIFIER_LIMIT = 1_000_000
CLASSIFIER_WORD_LIMIT = 25
FILE_PATH_CHARACTER_LIMIT = 1200
FILE_NAME_CHARACTER_LIMIT = 200
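# Length caps checked by path_too_long? / name_too_long? and re-checked in rename below.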
unrestrict_primary_key
plugin :update_primary_key
many_to_one :site
def self.path_too_long?(filename)
return true if filename.length > FILE_PATH_CHARACTER_LIMIT
false
end
def self.name_too_long?(filename)
return true if filename.length > FILE_NAME_CHARACTER_LIMIT
false
end
def before_destroy
if is_directory
site.site_files_dataset.where(path: /^#{Regexp.quote path}\//, is_directory: true).all.each do |site_file|
@ -17,7 +31,10 @@ class SiteFile < Sequel::Model
end
site.site_files_dataset.where(path: /^#{Regexp.quote path}\//, is_directory: false).all.each do |site_file|
begin
site_file.destroy
rescue Sequel::NoExistingObject
end
end
begin
@ -44,6 +61,18 @@ class SiteFile < Sequel::Model
current_path = self.path
new_path = site.scrubbed_path new_path
if new_path.length > FILE_PATH_CHARACTER_LIMIT
return false, 'new path too long'
end
if File.basename(new_path).length > FILE_NAME_CHARACTER_LIMIT
return false, 'new filename too long'
end
if new_path == ''
return false, 'cannot rename to empty path'
end
if current_path == 'index.html'
return false, 'cannot rename or move root index.html'
end
@ -52,11 +81,18 @@ class SiteFile < Sequel::Model
return false, "#{is_directory ? 'directory' : 'file'} already exists"
end
unless is_directory
if is_directory
if new_path.match(/\.html?$/)
return false, 'directory name cannot end with .htm or .html'
end
else # a file
mime_type = Magic.guess_file_mime_type site.files_path(self.path)
extname = File.extname new_path
return false, 'unsupported file type' unless site.class.valid_file_mime_type_and_ext?(mime_type, extname)
unless site.supporter? || site.class.valid_file_mime_type_and_ext?(mime_type, extname)
return false, 'unsupported file type'
end
end
begin
@ -73,8 +109,6 @@ class SiteFile < Sequel::Model
DB.transaction do
self.path = new_path
self.save_changes
site.purge_cache current_path
site.purge_cache new_path
if is_directory
site_files_in_dir = site.site_files.select {|sf| sf.path =~ /^#{current_path}\//}
@ -87,6 +121,9 @@ class SiteFile < Sequel::Model
site.purge_cache site_file.path
site.purge_cache original_site_file_path
end
else
site.purge_cache new_path
site.purge_cache current_path
end
end
@ -99,7 +136,7 @@ class SiteFile < Sequel::Model
DB['update sites set space_used=space_used-? where id=?', size, site_id].first
end
site.delete_cache site.files_path(path)
site.purge_cache site.files_path(path)
SiteChangeFile.filter(site_id: site_id, filename: path).delete
end
end

View file

@ -1,3 +1,4 @@
# frozen_string_literal: true
require 'resolv'
require 'zlib'
@ -18,14 +19,14 @@ class Stat < Sequel::Model
].first
end
def parse_logfiles(path)
def parse_logfiles(logfiles_path)
total_site_stats = {}
cache_control_ips = $config['cache_control_ips']
Dir["#{path}/*.log.gz"].each do |log_path|
site_logs = {}
Dir["#{logfiles_path}/*.log.gz"].each do |log_path|
gzfile = File.open log_path, 'r'
logfile = Zlib::GzipReader.new gzfile
@ -50,7 +51,6 @@ class Stat < Sequel::Model
views: 0,
bandwidth: 0,
view_ips: [],
ips: [],
referrers: {},
paths: {}
} unless site_logs[log_time][username]
@ -83,36 +83,35 @@ class Stat < Sequel::Model
site_logs[log_time][username][:paths][path] ||= 0
site_logs[log_time][username][:paths][path] += 1
end
logfile.close
FileUtils.rm log_path
rescue => e
puts "Log parse exception: #{e.inspect}"
logfile.close
FileUtils.mv log_path, log_path.gsub('.log', '.brokenlog')
next
end
logfile.close
#FileUtils.rm log_path
end
site_logs.each do |log_time, usernames|
Site.select(:id, :username).where(username: usernames.keys).all.each do |site|
site_logs[log_time][site.username][:id] = site.id
usernames[site.username][:id] = site.id
end
usernames.each do |username, site_log|
DB.transaction do
DB['update sites set hits=hits+?, views=views+? where username=?',
site_log[:hits],
site_log[:views],
username
].first
next unless site_log[:id]
opts = {site_id: site_log[:id], created_at: log_time.to_date.to_s}
stat = nil
DB[:stats].lock('EXCLUSIVE') {
stat = Stat.select(:id).where(opts).first
stat = Stat.create opts if stat.nil?
}
DB['update sites set hits=hits+?, views=views+? where id=?',
site_log[:hits],
site_log[:views],
site_log[:id]
].first
DB[
'update stats set hits=hits+?, views=views+?, bandwidth=bandwidth+? where id=?',
@@ -123,18 +122,11 @@ class Stat < Sequel::Model
].first
end
end
end
FileUtils.rm log_path
end
total_site_stats.each do |time, stats|
opts = {created_at: time.to_date.to_s}
DB[:stats].lock('EXCLUSIVE') {
stat = DailySiteStat.select(:id).where(opts).first
stat = DailySiteStat.create opts if stat.nil?
}
DB[
'update daily_site_stats set hits=hits+?, views=views+?, bandwidth=bandwidth+? where created_at=?',
@@ -144,138 +136,6 @@ class Stat < Sequel::Model
time.to_date
].first
end
end
end
end
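parse_logfiles walks every *.log.gz file under the directory it is given, tallies hits, unique-IP views, bandwidth, referrers and paths per site and per day, and only then applies the totals inside transactions. A hedged sketch of the read loop it is built around; the path and the whitespace-separated field order are assumptions taken from the legacy parser preserved in the commented block below:

require 'zlib'

# Illustrative only: not a drop-in replacement for the method above.
Dir['/var/log/neocities/*.log.gz'].each do |log_path|
  Zlib::GzipReader.open(log_path) do |logfile|
    logfile.each_line do |hit|
      time, username, size, path, ip, referrer = hit.split(' ')
      # aggregate per-site hits, unique-IP views and bandwidth here
    end
  end
end
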
=begin
require 'io/extra'
require 'geoip'
# Note: This isn't really a class right now.
module Stat
class << self
def parse_logfiles(path)
Dir["#{path}/*.log"].each do |logfile_path|
parse_logfile logfile_path
FileUtils.rm logfile_path
end
end
def parse_logfile(path)
geoip = GeoIP.new GEOCITY_PATH
logfile = File.open path, 'r'
hits = []
while hit = logfile.gets
time, username, size, path, ip, referrer = hit.split ' '
site = Site.select(:id).where(username: username).first
next unless site
paths_dataset = StatsDB[:paths]
path_record = paths_dataset[name: path]
path_id = path_record ? path_record[:id] : paths_dataset.insert(name: path)
referrers_dataset = StatsDB[:referrers]
referrer_record = referrers_dataset[name: referrer]
referrer_id = referrer_record ? referrer_record[:id] : referrers_dataset.insert(name: referrer)
location_id = nil
if city = geoip.city(ip)
locations_dataset = StatsDB[:locations].select(:id)
location_hash = {country_code2: city.country_code2, region_name: city.region_name, city_name: city.city_name}
location = locations_dataset.where(location_hash).first
location_id = location ? location[:id] : locations_dataset.insert(location_hash)
end
hits << [site.id, referrer_id, path_id, location_id, size, time]
end
StatsDB[:hits].import(
[:site_id, :referrer_id, :path_id, :location_id, :bytes_sent, :logged_at],
hits
)
end
end
end
=begin
def parse_logfile(path)
hits = {}
visits = {}
visit_ips = {}
logfile = File.open path, 'r'
while hit = logfile.gets
time, username, size, path, ip, referrer = hit.split ' '
hits[username] ||= 0
hits[username] += 1
visit_ips[username] = [] if !visit_ips[username]
unless visit_ips[username].include? ip
visits[username] ||= 0
visits[username] += 1
visit_ips[username] << ip
end
end
logfile.close
hits.each do |username,hitcount|
DB['update sites set hits=hits+? where username=?', hitcount, username].first
end
visits.each do |username,visitcount|
DB['update sites set views=views+? where username=?', visitcount, username].first
end
end
end
=end
=begin
def self.parse(logfile_path)
hits = {}
visits = {}
visit_ips = {}
logfile = File.open logfile_path, 'r'
while hit = logfile.gets
time, username, size, path, ip = hit.split ' '
hits[username] ||= 0
hits[username] += 1
visit_ips[username] = [] if !visit_ips[username]
unless visit_ips[username].include?(ip)
visits[username] ||= 0
visits[username] += 1
visit_ips[username] << ip
end
end
logfile.close
hits.each do |username,hitcount|
DB['update sites set hits=hits+? where username=?', hitcount, username].first
end
visits.each do |username,visitcount|
DB['update sites set views=views+? where username=?', visitcount, username].first
end
end
=end


@@ -1,3 +1,4 @@
# frozen_string_literal: true
require 'geoip'
class StatLocation < Sequel::Model


@@ -1,3 +1,4 @@
# frozen_string_literal: true
class StatPath < Sequel::Model
RETAINMENT_DAYS = 7


@@ -1,3 +1,4 @@
# frozen_string_literal: true
class StatReferrer < Sequel::Model
many_to_one :site
RETAINMENT_DAYS = 7


@@ -1,6 +1,8 @@
# frozen_string_literal: true
class Tag < Sequel::Model
NAME_LENGTH_MAX = 25
NAME_WORDS_MAX = 1
INVALID_TAG_REGEX = /[^a-zA-Z0-9 ]/
many_to_many :sites
def before_create
@@ -15,7 +17,11 @@ class Tag < Sequel::Model
def self.create_unless_exists(name)
name = clean_name name
return nil if name == '' || name.nil?
begin
dataset.filter(name: name).first || create(name: name)
rescue Sequel::UniqueConstraintViolation
dataset.filter(name: name).first
end
end
def self.autocomplete(name, limit=3)
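The begin/rescue added to create_unless_exists closes a check-then-insert race: two requests can both miss the lookup and both attempt the insert, and the unique index turns the loser's INSERT into Sequel::UniqueConstraintViolation, after which re-reading the row is safe. A minimal sketch of the same pattern, assuming a unique index on tags.name:

# Generic find-or-create that tolerates a concurrent insert of the same name.
def find_or_create_tag(name)
  Tag.where(name: name).first || Tag.create(name: name)
rescue Sequel::UniqueConstraintViolation
  # another process won the race between our lookup and create; reuse its row
  Tag.where(name: name).first
end
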
@@ -24,7 +30,7 @@ class Tag < Sequel::Model
def self.popular_names(limit=10)
cache_key = "tag_popular_names_#{limit}".to_sym
cache = $redis_cache[cache_key]
cache = $redis_cache.get cache_key
if cache.nil?
res = DB["select tags.name,count(*) as c from sites_tags inner join tags on tags.id=sites_tags.tag_id where tags.name != '' and tags.is_nsfw='f' group by tags.name having count(*) > 1 order by c desc LIMIT ?", limit].all
$redis_cache.set cache_key, res.to_msgpack
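popular_names now reads the cache with an explicit get and writes the query result MessagePack-encoded. A hedged sketch of that round trip; the key naming follows the code above, while the unpacking step and the stand-in result are assumptions (note that symbol keys come back as strings after unpack):

require 'redis'
require 'msgpack'

redis_cache = Redis.new   # the app wraps its connection in a namespace

cache_key = 'tag_popular_names_10'
cached = redis_cache.get cache_key

if cached.nil?
  # stand-in for the SQL aggregation in the method above
  res = [{name: 'art', c: 12}, {name: 'music', c: 9}]
  redis_cache.set cache_key, res.to_msgpack
  res
else
  MessagePack.unpack cached
end
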

File diff suppressed because one or more lines are too long


@@ -1,2 +0,0 @@
.skeuocard.js *{-moz-box-sizing:border-box;-webkit-box-sizing:border-box;box-sizing:border-box}.skeuocard.js html,.skeuocard.js body,.skeuocard.js div,.skeuocard.js span,.skeuocard.js applet,.skeuocard.js object,.skeuocard.js iframe,.skeuocard.js h1,.skeuocard.js h2,.skeuocard.js h3,.skeuocard.js h4,.skeuocard.js h5,.skeuocard.js h6,.skeuocard.js p,.skeuocard.js blockquote,.skeuocard.js pre,.skeuocard.js a,.skeuocard.js abbr,.skeuocard.js acronym,.skeuocard.js address,.skeuocard.js big,.skeuocard.js cite,.skeuocard.js code,.skeuocard.js del,.skeuocard.js dfn,.skeuocard.js em,.skeuocard.js img,.skeuocard.js ins,.skeuocard.js kbd,.skeuocard.js q,.skeuocard.js s,.skeuocard.js samp,.skeuocard.js small,.skeuocard.js strike,.skeuocard.js strong,.skeuocard.js sub,.skeuocard.js sup,.skeuocard.js tt,.skeuocard.js var,.skeuocard.js b,.skeuocard.js u,.skeuocard.js i,.skeuocard.js center,.skeuocard.js dl,.skeuocard.js dt,.skeuocard.js dd,.skeuocard.js ol,.skeuocard.js ul,.skeuocard.js li,.skeuocard.js fieldset,.skeuocard.js form,.skeuocard.js label,.skeuocard.js legend,.skeuocard.js table,.skeuocard.js caption,.skeuocard.js tbody,.skeuocard.js tfoot,.skeuocard.js thead,.skeuocard.js tr,.skeuocard.js th,.skeuocard.js td,.skeuocard.js article,.skeuocard.js aside,.skeuocard.js canvas,.skeuocard.js details,.skeuocard.js figcaption,.skeuocard.js figure,.skeuocard.js footer,.skeuocard.js header,.skeuocard.js hgroup,.skeuocard.js menu,.skeuocard.js nav,.skeuocard.js section,.skeuocard.js summary,.skeuocard.js time,.skeuocard.js mark,.skeuocard.js audio,.skeuocard.js video{margin:0;padding:0;border:0;outline:0;font-size:100%;font:inherit;vertical-align:baseline}.skeuocard.js article,.skeuocard.js aside,.skeuocard.js details,.skeuocard.js figcaption,.skeuocard.js figure,.skeuocard.js footer,.skeuocard.js header,.skeuocard.js hgroup,.skeuocard.js menu,.skeuocard.js nav,.skeuocard.js section{display:block}.skeuocard.js body{line-height:1}.skeuocard.js ol,.skeuocard.js ul{list-style:none}.skeuocard.js blockquote,.skeuocard.js q{quotes:none}.skeuocard.js blockquote:before,.skeuocard.js blockquote:after,.skeuocard.js q:before,.skeuocard.js q:after{content:'';content:none}.skeuocard.js ins{text-decoration:none}.skeuocard.js del{text-decoration:line-through}.skeuocard.js table{border-collapse:collapse;border-spacing:0}.skeuocard.js input,.skeuocard.js fieldset{line-height:normal;height:auto;padding:0px;margin:0px;display:inline-block;width:auto}
/*# sourceMappingURL=skeuocard.reset.css.map */


@@ -6,7 +6,7 @@
</head>
<body>
<h1>Neocities is temporarily unavailable</h1>
<p>Neocities is currently undergoing maintenance (or is experiencing an outage), we will be back shortly! Check <a href="https://twitter.com/neocities">@neocities</a> for status updates.</p>
<p>Neocities is currently undergoing maintenance (or is experiencing an outage), we will be back shortly! Check <a href="https://bsky.app/profile/neocities.org">@neocities.org on Bluesky</a> for status updates.</p>
<p>Our apologies for the inconvenience.</p>
<iframe width="560" height="315" src="https://www.youtube-nocookie.com/embed/-b1ZuF5yKoQ?rel=0&amp;start=2230" frameborder="0" allowfullscreen></iframe>

Some files were not shown because too many files have changed in this diff.