update aws gem, stub in rake tasks for s3

r888888888 2017-02-08 13:31:32 -08:00
parent b57b54113f
commit f0a8598ba2
3 changed files with 100 additions and 27 deletions

@@ -71,12 +71,14 @@ GEM
       public_suffix (~> 2.0, >= 2.0.2)
     arel (6.0.3)
     awesome_print (1.6.1)
-    aws-sdk (2.1.14)
-      aws-sdk-resources (= 2.1.14)
-    aws-sdk-core (2.1.14)
+    aws-sdk (2.7.4)
+      aws-sdk-resources (= 2.7.4)
+    aws-sdk-core (2.7.4)
+      aws-sigv4 (~> 1.0)
       jmespath (~> 1.0)
-    aws-sdk-resources (2.1.14)
-      aws-sdk-core (= 2.1.14)
+    aws-sdk-resources (2.7.4)
+      aws-sdk-core (= 2.7.4)
+    aws-sigv4 (1.0.0)
     bcrypt (3.1.10)
     bcrypt (3.1.10-x64-mingw32)
     bcrypt-ruby (3.1.5)
@@ -194,8 +196,7 @@ GEM
       rake
       rdoc
      semver2
-    jmespath (1.0.2)
-      multi_json (~> 1.0)
+    jmespath (1.3.1)
     json (1.8.6)
     jwt (1.5.6)
     kgio (2.10.0)

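The two hunks above are the lockfile half of the aws-sdk bump from 2.1.14 to 2.7.4: the newer SDK pulls request signing out into the separate aws-sigv4 gem, and the updated jmespath no longer depends on multi_json. The Gemfile itself is not part of this commit, so the exact requirement is an assumption; with a loose pin along the following lines, the resolution shown above falls out of a plain bundle update aws-sdk:

    # Gemfile (not in this diff; the constraint is illustrative only)
    gem "aws-sdk", "~> 2.7"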

@@ -105,11 +105,11 @@ class Post < ActiveRecord::Base
   end

   def file_url
-    if cdn_hosted?
-      Danbooru.config.danbooru_s3_base_url + "/#{file_path_prefix}#{md5}.#{file_ext}"
-    else
+    # if cdn_hosted?
+    #   Danbooru.config.danbooru_s3_base_url + "/#{file_path_prefix}#{md5}.#{file_ext}"
+    # else
       "/data/#{seo_tag_string}#{file_path_prefix}#{md5}.#{file_ext}"
-    end
+    # end
   end

   def large_file_url

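With the cdn_hosted? branch commented out, file_url no longer consults Danbooru.config.danbooru_s3_base_url; every post is served from the local /data path. Reassembled from the surviving lines above, the method now effectively reads:

    def file_url
      "/data/#{seo_tag_string}#{file_path_prefix}#{md5}.#{file_ext}"
    end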

@@ -1,23 +1,95 @@
 require 'danbooru_image_resizer/danbooru_image_resizer'

 namespace :images do
-  desc "Enable CDN"
-  task :enable_cdn, [:min_id, :max_id] => :environment do |t, args|
-    CurrentUser.scoped(User.admins.first, "127.0.0.1") do
-      credentials = Aws::Credentials.new(Danbooru.config.aws_access_key_id, Danbooru.config.aws_secret_access_key)
-      Aws.config.update({
-        region: "us-east-1",
-        credentials: credentials
-      })
-      client = Aws::S3::Client.new
-      bucket = Danbooru.config.aws_s3_bucket_name
-      Post.where("id >= ? and id <= ?", args[:min_id], args[:max_id]).find_each do |post|
-        post.cdn_hosted = true
-        post.save
-        key = File.basename(post.file_path)
-        client.copy_object(bucket: bucket, key: key, acl: "public-read", storage_class: "STANDARD", copy_source: "/#{bucket}/#{key}", metadata_directive: "COPY")
-        # client.put_object(bucket: bucket, key: key, body: body, content_md5: base64_md5, acl: "public-read", storage_class: "STANDARD")
+  desc "Upload large images to S3"
+  task :upload_large_to_s3, [:min_id, :max_id] => :environment do |t, args|
+    min_id = args[:min_id]
+    max_id = args[:max_id]
+
+    credentials = Aws::Credentials.new(Danbooru.config.aws_access_key_id, Danbooru.config.aws_secret_access_key)
+    Aws.config.update({
+      region: "us-west-2",
+      credentials: credentials
+    })
+    client = Aws::S3::Client.new
+    bucket = "danbooru-large"
+
+    Post.where("id >= ? and id <= ? and image_width > ?", min_id, max_id, Danbooru.config.large_image_width).find_each do |post|
+      if File.exists?(post.large_file_path)
+        key = File.basename(post.large_file_path)
+        body = open(post.large_file_path, "rb")
+        client.put_object(bucket: bucket, key: key, acl: "authenticated-read", body: body, content_md5: base64_md5)
+      end
+    end
+  end
+
+  desc "Upload previews to S3"
+  task :upload_preview_to_s3, [:min_id, :max_id] => :environment do |t, args|
+    min_id = args[:min_id]
+    max_id = args[:max_id]
+
+    credentials = Aws::Credentials.new(Danbooru.config.aws_access_key_id, Danbooru.config.aws_secret_access_key)
+    Aws.config.update({
+      region: "us-west-2",
+      credentials: credentials
+    })
+    client = Aws::S3::Client.new
+    bucket = "danbooru-preview"
+
+    Post.where("id >= ? and id <= ?", min_id, max_id).find_each do |post|
+      if File.exists?(post.preview_file_path)
+        key = File.basename(post.preview_file_path)
+        body = open(post.large_file_path, "rb")
+        client.put_object(bucket: bucket, key: key, acl: "authenticated-read", body: body, content_md5: base64_md5)
+      end
+    end
+  end
+
+  desc "Reset S3 + Storage Class"
+  task :reset_s3, [:min_id, :max_id] => :environment do |t, args|
+    min_id = args[:min_id]
+    max_id = args[:max_id]
+
+    credentials = Aws::Credentials.new(Danbooru.config.aws_access_key_id, Danbooru.config.aws_secret_access_key)
+    Aws.config.update({
+      region: "us-east-1",
+      credentials: credentials
+    })
+    client = Aws::S3::Client.new
+    bucket = Danbooru.config.aws_s3_bucket_name
+
+    Post.where("id >= ? and id <= ?", min_id, max_id).find_each do |post|
+      key = File.basename(post.file_path)
+      client.copy_object(bucket: bucket, key: key, acl: "authenticated-read", storage_class: "STANDARD", copy_source: "/#{bucket}/#{key}", metadata_directive: "COPY")
+    end
+  end
+
+  task :restore_glacier, [:min_id, :max_id] => :environment do |t, args|
+    min_id = args[:min_id] # 10_001
+    max_id = args[:max_id] # 50_000
+    credentials = Aws::Credentials.new(Danbooru.config.aws_access_key_id, Danbooru.config.aws_secret_access_key)
+    Aws.config.update({
+      region: "us-east-1",
+      credentials: credentials
+    })
+    client = Aws::S3::Client.new
+    bucket = Danbooru.config.aws_s3_bucket_name
+
+    Post.where("id >= ? and id <= ?", min_id, max_id).find_each do |post|
+      key = File.basename(post.file_path)
+      begin
+        client.restore_object(
+          bucket: bucket,
+          key: key,
+          restore_request: {
+            days: 1,
+            glacier_job_parameters: {
+              tier: "Bulk"
+            }
+          }
+        )
+      rescue Aws::S3::Errors::InvalidObjectState, Aws::S3::Errors::NoSuchKey, Aws::S3::Errors::RestoreAlreadyInProgress
       end
     end
   end
 end
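As the commit message says, these tasks are stubs, and two gaps stand out before they could run: base64_md5 is referenced in both upload tasks but never assigned, and upload_preview_to_s3 derives its key from post.preview_file_path while reading the body from post.large_file_path. A minimal sketch of the upload step with those gaps filled, assuming the same aws-sdk v2 client and Post path helpers used above, could look like this:

    # Sketch only, not part of the commit: supplies the Content-MD5 value the
    # stubs leave undefined and reads the same file the key is derived from.
    require 'digest/md5'

    def upload_to_s3(client, bucket, path)
      return unless File.exist?(path)

      File.open(path, "rb") do |body|
        client.put_object(
          bucket: bucket,
          key: File.basename(path),
          acl: "authenticated-read",
          body: body,
          # S3 expects Content-MD5 as the base64-encoded binary MD5 of the body
          content_md5: Digest::MD5.file(path).base64digest
        )
      end
    end

Invocation would follow the usual bracketed rake-argument form, for example with the ID range hinted at in the restore_glacier comments:

    bundle exec rake images:upload_large_to_s3[1,10000]
    bundle exec rake images:restore_glacier[10001,50000]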