deprecate ipfs

Kyle Drake 2023-12-28 01:00:26 -06:00
parent d394b2601e
commit dde5225752
15 changed files with 19 additions and 287 deletions

View file

@@ -27,7 +27,6 @@ gem 'paypal-recurring', require: 'paypal/recurring'
 gem 'geoip'
 gem 'io-extra', require: 'io/extra'
 #gem 'rye'
-gem 'base32'
 gem 'coveralls_reborn', require: false
 gem 'sanitize'
 gem 'will_paginate'

View file

@@ -41,7 +41,6 @@ GEM
       base58 (~> 0.2)
       keccak (~> 1.3)
     ansi (1.5.0)
-    base32 (0.3.4)
     base58 (0.2.3)
     base64 (0.2.0)
     bcrypt (3.1.20)
@@ -363,7 +362,6 @@ DEPENDENCIES
   addressable (>= 2.8.0)
   adequate_crypto_address
   apparition!
-  base32
   bcrypt
   capybara
   certified

View file

@@ -167,8 +167,7 @@ def api_info_for(site)
       created_at: site.created_at.rfc2822,
       last_updated: site.site_updated_at ? site.site_updated_at.rfc2822 : nil,
       domain: site.domain,
-      tags: site.tags.collect {|t| t.name},
-      latest_ipfs_hash: site.latest_archive ? site.latest_archive.ipfs_hash : nil
+      tags: site.tags.collect {|t| t.name}
     }
   }
 end

View file

@@ -63,8 +63,7 @@ post '/settings/:username/profile' do
   @site.update(
     profile_comments_enabled: params[:site][:profile_comments_enabled],
-    profile_enabled: params[:site][:profile_enabled],
-    ipfs_archiving_enabled: params[:site][:ipfs_archiving_enabled]
+    profile_enabled: params[:site][:profile_enabled]
   )
   flash[:success] = 'Profile settings changed.'
   redirect "/settings/#{@site.username}#profile"

View file

@@ -38,14 +38,6 @@ get '/site/:username/?' do |username|
   erb :'site', locals: {site: site, is_current_site: site == current_site}
 end
 
-get '/site/:username/archives' do
-  @site = Site[username: params[:username]]
-  not_found if @site.nil? || @site.is_banned || @site.is_deleted || !@site.ipfs_archiving_enabled
-  @title = "Site archives for #{@site.title}"
-  @archives = @site.archives_dataset.limit(300).order(:updated_at.desc).all
-  erb :'site/archives'
-end
-
 MAX_STAT_POINTS = 30
 get '/site/:username/stats' do
   @default_stat_points = 7

View file

@@ -0,0 +1,17 @@
+Sequel.migration do
+  up {
+    DB.drop_table :archives
+    DB.drop_column :sites, :ipfs_archiving_enabled
+  }
+
+  down {
+    DB.create_table! :archives do
+      Integer :site_id, index: true
+      String :ipfs_hash
+      DateTime :updated_at, index: true
+      unique [:site_id, :ipfs_hash]
+    end
+
+    DB.add_column :sites, :ipfs_archiving_enabled, :boolean, default: false
+  }
+end
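
The new migration above drops the archives table and the sites.ipfs_archiving_enabled column, keeping a down block so the change can be rolled back. A minimal sketch of applying and reverting a migration like this with Sequel's migrator, assuming a 'migrations' directory and a throwaway SQLite database rather than the app's real configuration:

    # Apply and revert migrations with Sequel's built-in migrator.
    # The database URL and directory name here are illustrative assumptions.
    require 'sequel'

    DB = Sequel.connect('sqlite://deprecate_ipfs_example.db')
    Sequel.extension :migration

    # Run every pending `up` block in order.
    Sequel::Migrator.run(DB, 'migrations')

    # Roll back to version 0, running `down` blocks; for the migration above this
    # recreates the archives table and the ipfs_archiving_enabled column.
    Sequel::Migrator.run(DB, 'migrations', target: 0)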

View file

@@ -1,39 +0,0 @@
-require 'base32'
-
-class Archive < Sequel::Model
-  many_to_one :site
-  set_primary_key [:site_id, :ipfs_hash]
-  unrestrict_primary_key
-  MAXIMUM_ARCHIVES_PER_SITE = 5
-  ARCHIVE_WAIT_TIME = 1.minute
-
-  def before_destroy
-    unpin
-    super
-  end
-
-  def unpin
-    return nil
-    # Not ideal. An SoA version is in progress.
-    if ENV['RACK_ENV'] == 'production' && $config['ipfs_ssh_host'] && $config['ipfs_ssh_user']
-      rbox = Rye::Box.new $config['ipfs_ssh_host'], :user => $config['ipfs_ssh_user']
-      rbox.disable_safe_mode
-      begin
-        response = rbox.execute "ipfs pin rm #{ipfs_hash}"
-        output_array = response
-      rescue => e
-        return true if e.message =~ /indirect pins cannot be removed directly/
-      ensure
-        rbox.disconnect
-      end
-    else
-      line = Terrapin::CommandLine.new('ipfs', 'pin rm :ipfs_hash')
-      response = line.run ipfs_hash: ipfs_hash
-      output_array = response.to_s.split("\n")
-    end
-  end
-
-  def url
-    "https://#{ipfs_hash}.ipfs.neocitiesops.net"
-  end
-end
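
The deleted Archive model unpinned content by shelling out to ipfs pin rm, over SSH via Rye in production and locally via Terrapin otherwise. For reference, a hedged sketch of just the local Terrapin path, assuming the terrapin gem and an ipfs binary on PATH:

    # Sketch of the local unpin path the removed Archive#unpin used, pulled out of
    # the model. Assumes the terrapin gem and an `ipfs` binary on PATH; the
    # production path (Rye over SSH) is not covered here.
    require 'terrapin'

    def unpin_ipfs_hash(ipfs_hash)
      line = Terrapin::CommandLine.new('ipfs', 'pin rm :ipfs_hash')
      line.run(ipfs_hash: ipfs_hash).to_s.split("\n")
    rescue Terrapin::ExitStatusError => e
      # Mirror the removed code's tolerance for pins that cannot be removed directly.
      return true if e.message =~ /indirect pins cannot be removed directly/
      raise
    end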

View file

@@ -206,8 +206,6 @@ class Site < Sequel::Model
   one_to_many :stat_locations
   one_to_many :stat_paths
 
-  one_to_many :archives
-
   def self.supporter_ids
     parent_supporters = DB[%{SELECT id FROM sites WHERE plan_type IS NOT NULL AND plan_type != 'free'}].all.collect {|s| s[:id]}
     child_supporters = DB[%{select a.id as id from sites a, sites b where a.parent_site_id is not null and a.parent_site_id=b.id and (a.plan_type != 'free' or b.plan_type != 'free')}].all.collect {|s| s[:id]}
@@ -774,69 +772,6 @@ class Site < Sequel::Model
     end
   end
 
-  #Rye::Cmd.add_command :ipfs
-
-  def add_to_ipfs
-    # Not ideal. An SoA version is in progress.
-    return nil
-
-    if archives_dataset.count > Archive::MAXIMUM_ARCHIVES_PER_SITE
-      archives_dataset.order(:updated_at).first.destroy
-    end
-
-    if $config['ipfs_ssh_host'] && $config['ipfs_ssh_user']
-      rbox = Rye::Box.new $config['ipfs_ssh_host'], user: $config['ipfs_ssh_user']
-      begin
-        cidv0 = rbox.ipfs(:add, :r, :Q, "sites/#{sharding_dir}/#{self.username.gsub(/\/|\.\./, '')}").first
-        cidv1b32 = rbox.ipfs(:cid, :base32, cidv0).first
-      ensure
-        rbox.disconnect
-      end
-    else
-      line = Terrapin::CommandLine.new('ipfs', 'add -r -Q :path')
-      response = line.run(path: files_path).strip
-      line = Terrapin::CommandLine.new('ipfs', 'cid base32 :hash')
-      cidv1b32 = line.run(hash: response).strip
-    end
-
-    cidv1b32
-  end
-
-  def purge_old_archives
-    archives_dataset.order(:updated_at).offset(Archive::MAXIMUM_ARCHIVES_PER_SITE).all.each do |archive|
-      archive.destroy
-    end
-  end
-
-  def archive!
-    ipfs_hash = add_to_ipfs
-    archive = archives_dataset.where(ipfs_hash: ipfs_hash).first
-
-    if archive
-      archive.updated_at = Time.now
-      archive.save_changes
-    else
-      begin
-        add_archive ipfs_hash: ipfs_hash, updated_at: Time.now
-      rescue Sequel::UniqueConstraintViolation
-        # Record already exists, update timestamp
-        archives_dataset.where(ipfs_hash: ipfs_hash).first.update updated_at: Time.now
-      end
-    end
-
-    add_redis_proxy_dnslink
-  end
-
-  def add_redis_proxy_dnslink
-    if host =~ /(.+)\.neocities\.org/ && latest_archive
-      $redis_proxy.hset "dns-#{host}", 'TXT', "dnslink=/ipfs/#{latest_archive.ipfs_hash}"
-    end
-  end
-
-  def latest_archive
-    @latest_archive ||= archives_dataset.order(:updated_at.desc).first
-  end
-
   def is_directory?(path)
     File.directory? files_path(path)
   end
@@ -1707,10 +1642,6 @@ class Site < Sequel::Model
         time,
         self.id
       ].first
-
-      if ipfs_archiving_enabled == true
-        ArchiveWorker.perform_in Archive::ARCHIVE_WAIT_TIME, self.id
-      end
     end
 
     reload
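
The removed Site methods added the site's files to IPFS, converted the resulting CIDv0 to a base32 CIDv1, and published a dnslink TXT record through the redis proxy. A condensed sketch of that pipeline, assuming the terrapin and redis gems, a local ipfs binary, and a plain Redis connection standing in for $redis_proxy:

    # Condensed sketch of the removed archive pipeline: add a directory to IPFS,
    # convert the CIDv0 to a base32 CIDv1, then publish a dnslink TXT record.
    # Assumes the terrapin and redis gems and a local `ipfs` binary; the Redis
    # connection stands in for the $redis_proxy handle the removed code used.
    require 'terrapin'
    require 'redis'

    def ipfs_cid_for(files_path)
      add = Terrapin::CommandLine.new('ipfs', 'add -r -Q :path')
      cidv0 = add.run(path: files_path).strip

      convert = Terrapin::CommandLine.new('ipfs', 'cid base32 :hash')
      convert.run(hash: cidv0).strip
    end

    def publish_dnslink(redis, host, cid)
      # The removed code keyed TXT records by hostname for the DNS proxy to serve.
      redis.hset "dns-#{host}", 'TXT', "dnslink=/ipfs/#{cid}"
    end

    # Usage sketch:
    #   cid = ipfs_cid_for('/path/to/site/files')
    #   publish_dnslink(Redis.new, 'example.neocities.org', cid)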

View file

@@ -95,7 +95,6 @@ describe 'api' do
     it 'succeeds for valid sitename' do
       create_site
       @site.update hits: 31337, domain: 'derp.com', new_tags_string: 'derpie, man'
-      @site.add_archive ipfs_hash: 'QmXGTaGWTT1uUtfSb2sBAvArMEVLK4rQEcQg5bv7wwdzwU'
       get '/api/info', sitename: @user
       _(res[:result]).must_equal 'success'
       _(res[:info][:sitename]).must_equal @site.username
@@ -104,16 +103,9 @@
       _(res[:info][:last_updated]).must_be_nil
       _(res[:info][:domain]).must_equal 'derp.com'
       _(res[:info][:tags]).must_equal ['derpie', 'man']
-      _(res[:info][:latest_ipfs_hash]).must_equal 'QmXGTaGWTT1uUtfSb2sBAvArMEVLK4rQEcQg5bv7wwdzwU'
       _(@site.reload.api_calls).must_equal 0
     end
 
-    it 'shows latest ipfs hash as nil when not present' do
-      create_site
-      get '/api/info', sitename: @user
-      _(res[:info][:latest_ipfs_hash]).must_be_nil
-    end
-
     it 'fails for bad auth' do
       basic_authorize 'derp', 'fake'
       get '/api/info'
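
With latest_ipfs_hash gone from api_info_for, a follow-up spec could pin down that the key no longer appears in /api/info responses. A hypothetical example in the same Minitest spec style, meant to slot into the describe block above; it is not part of this commit:

    # Hypothetical spec (not in this commit) asserting the removed field is absent.
    it 'no longer returns latest_ipfs_hash' do
      create_site
      get '/api/info', sitename: @user
      _(res[:info].key?(:latest_ipfs_hash)).must_equal false
    end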

View file

@@ -1,29 +0,0 @@
-require_relative '../environment.rb'
-
-describe ArchiveWorker do
-  it 'stores an IPFS archive' do
-    return if ENV['CI']
-    site = Fabricate :site
-    ipfs_hash = site.add_to_ipfs
-
-    ArchiveWorker.new.perform site.id
-
-    _(site.archives.length).must_equal 1
-    archive_one = site.archives.first
-    _(archive_one.ipfs_hash).wont_be_nil
-    _(archive_one.ipfs_hash).must_equal ipfs_hash
-    _(archive_one.updated_at).wont_be_nil
-
-    new_updated_at = Time.now - 500
-    archive_one.update updated_at: new_updated_at
-    ArchiveWorker.new.perform site.id
-    _(archive_one.reload.updated_at).wont_equal new_updated_at
-
-    site.store_files [{filename: 'test.jpg', tempfile: Rack::Test::UploadedFile.new('./tests/files/test.jpg', 'image/jpeg')}]
-    ArchiveWorker.new.perform site.id
-    site.reload
-    _(site.archives.length).must_equal 2
-    archive_two = site.archives_dataset.exclude(ipfs_hash: archive_one.ipfs_hash).first
-    _(archive_two.ipfs_hash).wont_be_nil
-  end
-end

View file

@@ -1,37 +0,0 @@
-<div class="header-Outro">
-  <div class="row content single-Col">
-    <h1>Neocities and the Distributed Web</h1>
-    <h2 class="subtitle">Working to build a faster, better, more permanent web.</h2>
-  </div>
-</div>
-
-<div class="content single-Col misc-page">
-  <img src="/img/neocities-ipfs.jpg" style="margin-bottom: 20px">
-
-  <article role="article">
-    <p>
-      Neocities has launched an implementation of <a href="https://ipfs.io">IPFS</a>, a protocol for the distributed web. The idea is simple: Instead of serving web sites from central servers, a distributed web allows any computer to help serve a site.
-    </p>
-
-    <p>
-      This is still very early stage technology and subject to change. To learn more, see our <a href="https://blog.neocities.org/blog/2015/09/08/its-time-for-the-distributed-web.html">blog post</a>.
-    </p>
-
-    <p>
-      IPFS archiving is now enabled on all sites. You'll see an IPFS CID link on the site profile, and an archive link that allows you to see past versions of your site (note: this is still a preview, so past site archives may still disappear, but we're working on making it better).
-    </p>
-
-    <p>
-      If you want to play around with this new technology, you can get IPFS for your computer and use it to retrieve content from our IPFS node servers. All you need to do is <a href="https://ipfs.io/docs/install/">download the IPFS daemon</a> (OSX/Linux only for now), and run the following command in your terminal:
-    </p>
-
-    <p>
-      <code>$ ipfs pin add -r THE_IPFS_CID_FOR_YOUR_SITE</code>
-    </p>
-
-    <% if signed_in? %>
-      <p>
-        IPFS archiving is not enabled by default for all sites. If you would like to enable IPFS for your site, please visit your site's <a href="/settings/<%= current_site.username %>">settings</a> and enable it.
-      </p>
-    <% end %>
-  </article>
-</div>

View file

@@ -20,15 +20,6 @@
       > Disable Site Profile
       <br>
     </div>
-
-    <h3>IPFS Archiving</h3>
-    <div style="display: inline-block; text-align: left; margin-bottom: 10px">
-      <input name="site[ipfs_archiving_enabled]" type="hidden" value="false">
-      <input name="site[ipfs_archiving_enabled]" type="checkbox" value="true"
-        <% if @site.ipfs_archiving_enabled == true %>checked<% end %>
-      > Enable IPFS Archiving <small>(<a href="/distributed-web">what is this?</a>)</small>
-    </div>
-
   </div>
 
   <input class="btn-Action" type="submit" value="Update Settings">

View file

@@ -19,11 +19,6 @@
     <div class="col col-50 profile-info">
       <h2 class="eps title-with-badge"><span><%= site.title %></span> <% if site.supporter? %><a href="/supporter" class="supporter-badge" title="Neocities Supporter"></a> <% end %></h2>
       <p class="site-url"><a href="<%= site.uri %>"><%= site.host %></a></p>
-      <!--
-      <% if false #site.latest_archive %>
-        <p><a href="<%= site.latest_archive.url %>" style="margin-right: 5px"><%= site.latest_archive.ipfs_hash %></a><small style="font-size: 7pt"><a href="/permanent-web">(what is this?)</a></small></p>
-      <% end %>
-      -->
       <% follow_count = site.follows_dataset.count %>
       <div class="stats">
         <div class="stat"><strong><%= site.views.format_large_number %></strong> <span>view<%= site.views == 1 ? '' : 's' %></span></div>
@@ -36,10 +31,6 @@
         <a href="/dashboard" class="btn-Action edit"><i class="fa fa-edit" title="Edit"></i> Edit Site</a>
       <% end %>
 
-      <% if site.latest_archive && site.ipfs_archiving_enabled %>
-        <a href="/site/<%= site.username %>/archives" class="btn-Action edit"><i class="fa fa-history" title="Archives"></i> Archives</a>
-      <% end %>
-
       <% if current_site && current_site != site %>
         <% is_following = current_site.is_following?(site) %>

View file

@@ -1,33 +0,0 @@
-<div class="header-Outro">
-  <div class="row content single-Col">
-    <h1>IPFS Archives</h1>
-  </div>
-</div>
-
-<div class="content single-Col misc-page">
-  <article role="article">
-    <% if @archives.length == 0 %>
-      No archives yet.
-    <% else %>
-      <table class="table">
-        <tr>
-          <th>IPFS CID <small style="display: inline"><a href="/permanent-web">(what is this?)</a></small></th>
-          <th>Archived Time</th>
-        </tr>
-        <% @archives.each do |archive| %>
-          <tr>
-            <td><a href="<%= archive.url %>"><%= archive.ipfs_hash %></a></td>
-            <td><%= archive.updated_at.ago.downcase %></td>
-          </tr>
-        <% end %>
-      </table>
-
-      <p>
-        This is a preview release of a new technology. We're still figuring things out, and may stop hosting archives without notice. <a href="/permanent-web">Learn how you can host your own copies of these archives</a>.
-      </p>
-      <p>
-        Archives are captured once every <%= Archive::ARCHIVE_WAIT_TIME / 60 %> minutes, so if you don't see your latest changes, check back later.
-      </p>
-    <% end %>
-  </article>
-</div>

View file

@@ -1,39 +0,0 @@
-require 'sidekiq/api'
-
-class ArchiveWorker
-  include Sidekiq::Worker
-  sidekiq_options queue: :archive, retry: 2, backtrace: true
-
-  def perform(site_id)
-    site = Site[site_id]
-    return if site.nil? || site.is_banned? || site.is_deleted
-
-    if site.site_files_dataset.count > 1000
-      logger.info "skipping #{site_id} (#{site.username}) due to > 1000 files"
-      return
-    end
-
-    queue = Sidekiq::Queue.new self.class.sidekiq_options_hash['queue']
-
-    logger.info "JOB ID: #{jid} #{site_id.inspect}"
-    queue.each do |job|
-      if job.args == [site_id] && job.jid != jid
-        logger.info "DELETING #{job.jid} for site_id #{site_id}"
-        job.delete
-      end
-    end
-
-    scheduled_jobs = Sidekiq::ScheduledSet.new.select do |scheduled_job|
-      scheduled_job.klass == 'ArchiveWorker' &&
-      scheduled_job.args[0] == site_id
-    end
-
-    scheduled_jobs.each do |scheduled_job|
-      logger.info "DELETING scheduled job #{scheduled_job.jid} for site_id #{site_id}"
-      scheduled_job.delete
-    end
-
-    logger.info "ARCHIVING: #{site.username}"
-    site.archive!
-  end
-end
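
The deleted worker's main trick was de-duplicating Sidekiq jobs: before archiving, it dropped any queued or scheduled ArchiveWorker jobs for the same site so only the newest run survived. An isolated sketch of that pattern, assuming only the sidekiq gem; the class name here is a stand-in, not the removed worker:

    # Isolated sketch of the removed worker's de-duplication pattern. Assumes the
    # sidekiq gem; DedupedWorker is a stand-in name, and the real work is omitted.
    require 'sidekiq'
    require 'sidekiq/api'

    class DedupedWorker
      include Sidekiq::Worker
      sidekiq_options queue: :archive, retry: 2

      def perform(site_id)
        # Drop duplicate jobs already waiting in this queue.
        Sidekiq::Queue.new(self.class.sidekiq_options_hash['queue']).each do |job|
          job.delete if job.args == [site_id] && job.jid != jid
        end

        # Drop duplicates scheduled for later (enqueued with perform_in).
        Sidekiq::ScheduledSet.new.each do |job|
          job.delete if job.klass == self.class.name && job.args[0] == site_id
        end

        # ... do the actual (newest) work here ...
      end
    end

    # Scheduling sketch, matching how the removed Site hook enqueued archive jobs:
    #   DedupedWorker.perform_in 60, site_id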