stats: cleanup

This commit is contained in:
Kyle Drake 2025-01-31 11:09:12 -06:00
parent 514836fd4f
commit cf712b7175

View file

@@ -19,14 +19,14 @@ class Stat < Sequel::Model
].first
end
def parse_logfiles(path)
def parse_logfiles(logfiles_path)
total_site_stats = {}
cache_control_ips = $config['cache_control_ips']
site_logs = {}
Dir["#{path}/*.log.gz"].each do |log_path|
Dir["#{logfiles_path}/*.log.gz"].each do |log_path|
gzfile = File.open log_path, 'r'
logfile = Zlib::GzipReader.new gzfile
@@ -51,7 +51,6 @@ class Stat < Sequel::Model
views: 0,
bandwidth: 0,
view_ips: [],
ips: [],
referrers: {},
paths: {}
} unless site_logs[log_time][username]
@@ -140,134 +139,3 @@ class Stat < Sequel::Model
end
end
end
=begin
require 'io/extra'
require 'geoip'
# Note: This isn't really a class right now.
module Stat
class << self
# Processes every uncompressed *.log file found under +path+.
# Each file is handed to parse_logfile and deleted once parsed.
def parse_logfiles(path)
  pending = Dir["#{path}/*.log"]
  pending.each do |log|
    parse_logfile log
    FileUtils.rm log
  end
end
# Parses a single access log and bulk-imports one row per hit into
# StatsDB[:hits], interning request paths, referrers and GeoIP-resolved
# locations into their own lookup tables (insert-if-missing).
#
# Expected log line format (space separated):
#   time username size path ip referrer
# NOTE(review): the split assumes no field contains a space — presumably
# guaranteed by the log writer; verify.
def parse_logfile(path)
geoip = GeoIP.new GEOCITY_PATH
# NOTE(review): logfile is never closed in this (dead) version — the
# handle leaks; the block form of File.open would fix it.
logfile = File.open path, 'r'
hits = []
while hit = logfile.gets
# NOTE(review): this destructure clobbers the +path+ parameter from the
# second line onward — harmless here since the param is not reused.
time, username, size, path, ip, referrer = hit.split ' '
# Hits for unknown usernames are skipped entirely.
site = Site.select(:id).where(username: username).first
next unless site
# Intern the request path: reuse the existing row's id or insert a new one.
paths_dataset = StatsDB[:paths]
path_record = paths_dataset[name: path]
path_id = path_record ? path_record[:id] : paths_dataset.insert(name: path)
# Intern the referrer the same way.
referrers_dataset = StatsDB[:referrers]
referrer_record = referrers_dataset[name: referrer]
referrer_id = referrer_record ? referrer_record[:id] : referrers_dataset.insert(name: referrer)
# Location is optional — left nil when GeoIP cannot resolve the IP.
location_id = nil
if city = geoip.city(ip)
locations_dataset = StatsDB[:locations].select(:id)
location_hash = {country_code2: city.country_code2, region_name: city.region_name, city_name: city.city_name}
location = locations_dataset.where(location_hash).first
location_id = location ? location[:id] : locations_dataset.insert(location_hash)
end
hits << [site.id, referrer_id, path_id, location_id, size, time]
end
# Single multi-row insert instead of one INSERT per hit.
StatsDB[:hits].import(
[:site_id, :referrer_id, :path_id, :location_id, :bytes_sent, :logged_at],
hits
)
end
end
end
=begin
# Tallies per-site hit counts and unique-IP visit counts from one access
# log, then applies them to the sites table with a single UPDATE per site.
#
# Expected log line format (space separated):
#   time username size path ip referrer
# A "visit" is counted once per distinct IP per username within this file.
#
# Fixes over the previous version:
# - File handle is closed even if parsing raises (block form of File.open);
#   the old code only reached logfile.close on the happy path.
# - The destructured request path no longer shadows the +path+ parameter.
# - Hash defaults replace the manual ||= / `= [] if !` initialization.
def parse_logfile(path)
  hits = Hash.new 0
  visits = Hash.new 0
  visit_ips = Hash.new { |h, k| h[k] = [] }

  File.open path, 'r' do |logfile|
    while hit = logfile.gets
      _time, username, _size, _request_path, ip, _referrer = hit.split ' '
      hits[username] += 1
      # Count each IP only once per site for the views metric.
      unless visit_ips[username].include? ip
        visits[username] += 1
        visit_ips[username] << ip
      end
    end
  end

  hits.each do |username, hitcount|
    DB['update sites set hits=hits+? where username=?', hitcount, username].first
  end
  visits.each do |username, visitcount|
    DB['update sites set views=views+? where username=?', visitcount, username].first
  end
end
end
=end
=begin
# Tallies per-site hit counts and unique-IP visit counts from one access
# log, then applies them to the sites table with a single UPDATE per site.
# (Oldest variant: its log format has no trailing referrer field.)
#
# Expected log line format (space separated):
#   time username size path ip
#
# Fixes over the previous version:
# - File handle is closed even if parsing raises (block form of File.open);
#   the old code only reached logfile.close on the happy path.
# - Hash defaults replace the manual ||= / `= [] if !` initialization.
def self.parse(logfile_path)
  hits = Hash.new 0
  visits = Hash.new 0
  visit_ips = Hash.new { |h, k| h[k] = [] }

  File.open logfile_path, 'r' do |logfile|
    while hit = logfile.gets
      _time, username, _size, _path, ip = hit.split ' '
      hits[username] += 1
      # Count each IP only once per site for the views metric.
      unless visit_ips[username].include?(ip)
        visits[username] += 1
        visit_ips[username] << ip
      end
    end
  end

  hits.each do |username, hitcount|
    DB['update sites set hits=hits+? where username=?', hitcount, username].first
  end
  visits.each do |username, visitcount|
    DB['update sites set views=views+? where username=?', visitcount, username].first
  end
end
=end