forked from e621ng/e621ng
[Cleanup] Remove some storage manager code
This commit is contained in:
parent
451e23c121
commit
3857eeccf3
3
.env
3
.env
@ -75,7 +75,4 @@
|
||||
# export DANBOORU_PIXIV_PASSWORD=
|
||||
# export DANBOORU_TWITTER_API_KEY=
|
||||
# export DANBOORU_TWITTER_API_SECRET=
|
||||
# export DANBOORU_AWS_ACCESS_KEY_ID=
|
||||
# export DANBOORU_AWS_SECRET_ACCESS_KEY=
|
||||
# export DANBOORU_AWS_SQS_REGION=
|
||||
# export DANBOORU_IQDBS_SERVER=
|
||||
|
@ -1,54 +0,0 @@
|
||||
require 'securerandom'

# Mirrors a local file onto remote servers over SFTP.
#
# Every upload goes to a unique temp path first and is then renamed into
# place, so remote readers never observe a partially written file.
class RemoteFileManager
  # @return [String] local filesystem path of the managed file
  attr_reader :path

  # @param path [String] local filesystem path of the file to distribute/delete
  def initialize(path)
    @path = path
  end

  # Copies the file to the archive server identified by dest_url.
  # The destination directory is derived from the URL's host subdomain,
  # e.g. "raikou1.example.com/foo" -> "/var/www/raikou1/foo".
  #
  # @param dest_url [String] URL naming the archive host and target path
  def distribute_to_archive(dest_url)
    uri = URI.parse(dest_url)
    dir_name = uri.host.split(".").first
    uuid = SecureRandom.uuid
    dest_path = "/var/www/#{dir_name}#{uri.path}"
    temp_path = "/tmp/rfm-#{Danbooru.config.server_host}-#{uuid}"

    Net::SFTP.start(uri.host, Danbooru.config.archive_server_login) do |ftp|
      ftp.upload!(path, temp_path)
      begin
        ftp.rename!(temp_path, dest_path)
      rescue Net::SFTP::StatusException
        # The destination already exists: remove it and retry the rename.
        ftp.remove!(dest_path)
        # BUG FIX: was `ftp.rename!(temp_apth, dest_path)` — a typo that
        # raised NameError whenever this recovery path was taken.
        ftp.rename!(temp_path, dest_path)
      end
    end
  end

  # Copies the file to every other app server, using the same
  # upload-to-temp-then-rename sequence as distribute_to_archive but with
  # the file's own path as the destination.
  def distribute
    uuid = SecureRandom.uuid
    temp_path = "/tmp/rfm-#{Danbooru.config.server_host}-#{uuid}"

    Danbooru.config.other_server_hosts.each do |hostname|
      Net::SFTP.start(hostname, Danbooru.config.remote_server_login) do |ftp|
        ftp.upload!(path, temp_path)
        begin
          ftp.rename!(temp_path, path)
        rescue Net::SFTP::StatusException
          # this typically means the file already exists
          # so delete and try renaming again
          ftp.remove!(path)
          ftp.rename!(temp_path, path)
        end
      end
    end
  end

  # Removes the file from every other app server.
  # NOTE(review): this uses the non-bang `remove`, which only queues the
  # SFTP request; the other methods use the synchronous bang forms.
  # Confirm whether `remove!` was intended here.
  def delete
    Danbooru.config.other_server_hosts.each do |hostname|
      Net::SFTP.start(hostname, Danbooru.config.remote_server_login) do |ftp|
        ftp.remove(path)
      end
    end
  end
end
|
@ -19,14 +19,6 @@ manager with no constraints as a default case.
|
||||
matcher.add_manager(id: 850_001..2_000_000) do
|
||||
StorageManager::SFTP.new("raikou2.donmai.us", base_url: "https://raikou2.donmai.us", hierarchical: true, base_dir: "/var/www/raikou2")
|
||||
end
|
||||
|
||||
matcher.add_manager(id: 1..3_000_000, type: [:large, :original]) do
|
||||
StorageManager::SFTP.new(*Danbooru.config.all_server_hosts, base_url: "https://hijiribe.donmai.us/data")
|
||||
end
|
||||
|
||||
matcher.add_manager({}) do
|
||||
StorageManager::SFTP.new(*Danbooru.config.all_server_hosts, base_url: "#{CurrentUser.root_url}/data")
|
||||
end
|
||||
end
|
||||
|
||||
=end
|
||||
@ -109,4 +101,3 @@ class StorageManager::Match < StorageManager
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
|
@ -95,16 +95,6 @@ class Post < ApplicationRecord
|
||||
Danbooru.config.storage_manager.move_file_undelete(self)
|
||||
end
|
||||
|
||||
# Persists a post's files through the configured storage manager: the
# original is always stored; the large and preview variants are stored
# only when present.
def distribute_files(file, sample_file, preview_file)
  variants = { original: file, large: sample_file, preview: preview_file }
  variants.each do |type, variant_file|
    next if type != :original && !variant_file.present?
    storage_manager.store_file(variant_file, self, type)
  end
end
|
||||
|
||||
# The storage manager used for writing backup copies of post files.
def backup_storage_manager
  config = Danbooru.config
  config.backup_storage_manager
end
|
||||
|
||||
# The storage manager used for reading and writing this post's files.
def storage_manager
  config = Danbooru.config
  config.storage_manager
end
|
||||
|
@ -432,36 +432,11 @@ fart'
|
||||
}
|
||||
end
|
||||
|
||||
# Disable the forced use of HTTPS.
|
||||
# def ssl_options
|
||||
# false
|
||||
# end
|
||||
|
||||
# The name of the server the app is hosted on, taken from the OS hostname.
def server_host
  hostname = Socket.gethostname
  hostname
end
|
||||
|
||||
# Names of all Danbooru servers which serve out of the same common database.
# Used in conjunction with load balancing to distribute files from one server
# to the others. Each name should match gethostname on the corresponding
# server; the default is just this host.
def all_server_hosts
  Array(server_host)
end
|
||||
|
||||
# Names of the other Danbooru servers (every entry of all_server_hosts
# except this host). Memoized after the first call.
def other_server_hosts
  @other_server_hosts ||= all_server_hosts - [server_host]
end
|
||||
|
||||
# SSH user for Net::SFTP connections to peer app servers
# (used by RemoteFileManager#distribute and #delete).
def remote_server_login
  "danbooru"
end
|
||||
|
||||
# SSH user for Net::SFTP connections to the archive servers
# (used by RemoteFileManager#distribute_to_archive).
def archive_server_login
  "danbooru"
end
|
||||
|
||||
# The method to use for storing image files.
|
||||
def storage_manager
|
||||
# Store files on the local filesystem.
|
||||
@ -475,24 +450,14 @@ fart'
|
||||
# ~/.ssh_config or in the ssh_options param (ref: http://net-ssh.github.io/net-ssh/Net/SSH.html#method-c-start)
|
||||
# StorageManager::SFTP.new("i1.example.com", "i2.example.com", base_dir: "/mnt/backup", hierarchical: false, ssh_options: {})
|
||||
|
||||
# Store files in an S3 bucket. The bucket must already exist and be
|
||||
# writable by you. Configure your S3 settings in aws_region and
|
||||
# aws_credentials below, or in the s3_options param (ref:
|
||||
# https://docs.aws.amazon.com/sdkforruby/api/Aws/S3/Client.html#initialize-instance_method)
|
||||
# StorageManager::S3.new("my_s3_bucket", base_url: "https://my_s3_bucket.s3.amazonaws.com/", s3_options: {})
|
||||
|
||||
# Select the storage method based on the post's id and type (preview, large, or original).
|
||||
# StorageManager::Hybrid.new do |id, md5, file_ext, type|
|
||||
# ssh_options = { user: "danbooru" }
|
||||
#
|
||||
# if type.in?([:large, :original]) && id.in?(0..850_000)
|
||||
# StorageManager::SFTP.new("raikou1.donmai.us", base_url: "https://raikou1.donmai.us", base_dir: "/path/to/files", hierarchical: true, ssh_options: ssh_options)
|
||||
# elsif type.in?([:large, :original]) && id.in?(850_001..2_000_000)
|
||||
# StorageManager::SFTP.new("raikou2.donmai.us", base_url: "https://raikou2.donmai.us", base_dir: "/path/to/files", hierarchical: true, ssh_options: ssh_options)
|
||||
# elsif type.in?([:large, :original]) && id.in?(2_000_001..3_000_000)
|
||||
# StorageManager::SFTP.new(*all_server_hosts, base_url: "https://hijiribe.donmai.us/data", ssh_options: ssh_options)
|
||||
# else
|
||||
# StorageManager::SFTP.new(*all_server_hosts, ssh_options: ssh_options)
|
||||
# StorageManager::SFTP.new("raikou2.donmai.us", base_url: "https://raikou2.donmai.us", base_dir: "/path/to/files", hierarchical: true, ssh_options: ssh_options)
|
||||
# end
|
||||
# end
|
||||
end
|
||||
@ -508,12 +473,6 @@ fart'
|
||||
# Backup files to /mnt/backup on a remote system. Configure SSH settings
|
||||
# in ~/.ssh_config or in the ssh_options param (ref: http://net-ssh.github.io/net-ssh/Net/SSH.html#method-c-start)
|
||||
# StorageManager::SFTP.new("www.example.com", base_dir: "/mnt/backup", ssh_options: {})
|
||||
|
||||
# Backup files to an S3 bucket. The bucket must already exist and be
|
||||
# writable by you. Configure your S3 settings in aws_region and
|
||||
# aws_credentials below, or in the s3_options param (ref:
|
||||
# https://docs.aws.amazon.com/sdkforruby/api/Aws/S3/Client.html#initialize-instance_method)
|
||||
# StorageManager::S3.new("my_s3_bucket_name", s3_options: {})
|
||||
end
|
||||
|
||||
#TAG CONFIGURATION
|
||||
|
@ -1,154 +0,0 @@
|
||||
# Maintenance tasks for post image files: SFTP distribution, S3 storage-class
# management, and local regeneration of image variants.
namespace :images do
  desc "Distribute posts to all servers via SFTP"
  # Copies original/large/preview files for posts in [min_id, max_id] from
  # local storage to every configured server via the SFTP storage manager.
  task :distribute, [:min_id, :max_id] => :environment do |t, args|
    min_id = args[:min_id]
    max_id = args[:max_id]
    lsm = StorageManager::Local.new(base_url: "https://danbooru.donmai.us/data", base_dir: "/var/www/danbooru2/shared/data", hierarchical: false)
    sftpsm = StorageManager::SFTP.new(*Danbooru.config.all_server_hosts, base_url: "https://danbooru.donmai.us/data")

    Post.where("id between ? and ?", min_id, max_id).find_each do |post|
      sftpsm.store_file(lsm.open_file(post, :original), post, :original)
      sftpsm.store_file(lsm.open_file(post, :large), post, :large) if post.has_large?
      sftpsm.store_file(lsm.open_file(post, :preview), post, :preview) if post.has_preview?
    end
  end

  desc "Reset S3 + Storage Class"
  # Copies each post's S3 object onto itself to force the STANDARD storage
  # class and a public-read ACL for posts in [min_id, max_id].
  task :reset_s3, [:min_id, :max_id] => :environment do |t, args|
    min_id = args[:min_id] # 1
    max_id = args[:max_id] # 50_000

    credentials = Aws::Credentials.new(Danbooru.config.aws_access_key_id, Danbooru.config.aws_secret_access_key)
    Aws.config.update({
      region: "us-east-1",
      credentials: credentials
    })
    client = Aws::S3::Client.new
    bucket = Danbooru.config.aws_s3_bucket_name

    Post.where("id >= ? and id <= ?", min_id, max_id).find_each do |post|
      key = File.basename(post.file_path)
      begin
        client.copy_object(bucket: bucket, key: key, acl: "public-read", storage_class: "STANDARD", copy_source: "/#{bucket}/#{key}", metadata_directive: "COPY")
        puts "copied #{post.id}"
      rescue Aws::S3::Errors::InvalidObjectState
        # Object is archived (e.g. Glacier) and cannot be copied in place.
        puts "invalid state #{post.id}"
      rescue Aws::S3::Errors::NoSuchKey
        puts "missing #{post.id}"
      end
    end
  end

  desc "restore from glacier"
  # Issues Bulk-tier Glacier restore requests (1 day) for the preview
  # objects of posts in [min_id, max_id].
  task :restore_glacier, [:min_id, :max_id] => :environment do |t, args|
    min_id = args[:min_id] # 10_001
    max_id = args[:max_id] # 50_000

    credentials = Aws::Credentials.new(Danbooru.config.aws_access_key_id, Danbooru.config.aws_secret_access_key)
    Aws.config.update({
      region: "us-east-1",
      credentials: credentials
    })
    client = Aws::S3::Client.new
    bucket = Danbooru.config.aws_s3_bucket_name

    Post.where("id >= ? and id <= ?", min_id, max_id).find_each do |post|
      key = "preview/" + File.basename(post.preview_file_path)
      begin
        client.restore_object(
          bucket: bucket,
          key: key,
          restore_request: {
            days: 1,
            glacier_job_parameters: {
              tier: "Bulk"
            }
          }
        )
        puts "restored #{post.id}"
      rescue Aws::S3::Errors::InvalidObjectState
        puts "already glaciered #{post.id}"
      rescue Aws::S3::Errors::NoSuchKey
        puts "missing #{post.id}"
      rescue Aws::S3::Errors::RestoreAlreadyInProgress
        puts "already restoring #{post.id}"
      end
    end
  end

  desc "Redownload an image from Pixiv"
  # Re-fetches the source image for post id=ENV["id"] from Pixiv and
  # redistributes its files.
  task :download_pixiv => :environment do
    post_id = ENV["id"]

    if post_id !~ /\d+/
      raise "Usage: regen_img.rb POST_ID"
    end

    post = Post.find(post_id)
    # Extract the pixiv illust id (a run of 5+ digits) from the post source.
    post.source =~ /(\d{5,})/
    if illust_id = $1
      response = PixivApiClient.new.work(illust_id)
      upload = Upload.new
      upload.source = response.pages.first
      upload.file_ext = post.file_ext
      upload.image_width = post.image_width
      upload.image_height = post.image_height
      upload.md5 = post.md5
      upload.download_from_source(post.file_path)
      # NOTE(review): Post#distribute_files elsewhere in this changeset takes
      # (file, sample_file, preview_file); this zero-arg call looks stale —
      # confirm against the current Post model.
      post.distribute_files
    end
  end

  desc "Regenerates all images for a post id"
  # Rebuilds the resized variants for post id=ENV["id"] from the local
  # original file, then redistributes.
  task :regen => :environment do
    post_id = ENV["id"]

    if post_id !~ /\d+/
      raise "Usage: regen id=n"
    end

    post = Post.find(post_id)
    upload = Upload.new
    upload.file_ext = post.file_ext
    upload.image_width = post.image_width
    upload.image_height = post.image_height
    upload.md5 = post.md5
    upload.generate_resizes(post.file_path)
    # NOTE(review): zero-arg distribute_files — see note in :download_pixiv.
    post.distribute_files
  end

  desc "Generate thumbnail-sized images of posts"
  # Regenerates the 150px preview for post id=ENV["id"] (images only).
  task :generate_preview => :environment do
    width = 150
    post_id = ENV["id"]

    if post_id !~ /\d+/
      raise "Usage: generate_preview id=n"
    end

    Post.where(id: post_id).find_each do |post|
      if post.is_image?
        puts "resizing preview #{post.id}"
        DanbooruImageResizer.resize(post.file_path, post.preview_file_path, width, width, 90)
      end
    end
  end

  desc "Generate large-sized images of posts"
  # Regenerates the large variant for post id=ENV["id"] and redistributes.
  task :generate_large => :environment do
    post_id = ENV["id"]

    if post_id !~ /\d+/
      raise "Usage: generate_large id=n"
    end

    Post.where(id: post_id).find_each do |post|
      if post.is_image? && post.has_large?
        puts "resizing large #{post.id}"
        DanbooruImageResizer.resize(post.file_path, post.large_file_path, Danbooru.config.large_image_width, nil, 90)
        # NOTE(review): zero-arg distribute_files — see note in :download_pixiv.
        post.distribute_files
      end
    end
  end
end
|
||||
|
@ -192,30 +192,6 @@ class UploadServiceTest < ActiveSupport::TestCase
|
||||
end
|
||||
end
|
||||
|
||||
# Tests for UploadService's .process_file step.
context ".process_file" do
  setup do
    # Build (not create) an upload backed by the jpg fixture.
    @upload = FactoryBot.build(:jpg_upload)
    @file = @upload.file
  end

  context "with an original_post_id" do
    should "run" do
      # distribute_files must be invoked exactly three times.
      subject.expects(:distribute_files).times(3)
      subject.process_file(@upload, @file, original_post_id: 12345)
    end
  end

  should "run" do
    # distribute_files must be invoked exactly three times.
    subject.expects(:distribute_files).times(3)
    subject.process_file(@upload, @file)
    # Metadata extracted from the jpg fixture.
    assert_equal("jpg", @upload.file_ext)
    assert_equal(28086, @upload.file_size)
    assert_equal("ecef68c44edb8a0d6a3070b5f8e8ee76", @upload.md5)
    assert_equal(335, @upload.image_height)
    assert_equal(500, @upload.image_width)
  end
end
|
||||
|
||||
context ".generate_resizes" do
|
||||
context "for an ugoira" do
|
||||
setup do
|
||||
|
Loading…
Reference in New Issue
Block a user