From f0a8598ba204a82a8f853caae03c770522aaf00b Mon Sep 17 00:00:00 2001
From: r888888888
Date: Wed, 8 Feb 2017 13:31:32 -0800
Subject: [PATCH] update aws gem, stub in rake tasks for s3

---
 Gemfile.lock          |  15 +++---
 app/models/post.rb    |   8 ++--
 lib/tasks/images.rake | 104 +++++++++++++++++++++++++++++++++++-------
 3 files changed, 100 insertions(+), 27 deletions(-)

diff --git a/Gemfile.lock b/Gemfile.lock
index f41a261b7..ae7f4e144 100644
--- a/Gemfile.lock
+++ b/Gemfile.lock
@@ -71,12 +71,14 @@ GEM
       public_suffix (~> 2.0, >= 2.0.2)
     arel (6.0.3)
     awesome_print (1.6.1)
-    aws-sdk (2.1.14)
-      aws-sdk-resources (= 2.1.14)
-    aws-sdk-core (2.1.14)
+    aws-sdk (2.7.4)
+      aws-sdk-resources (= 2.7.4)
+    aws-sdk-core (2.7.4)
+      aws-sigv4 (~> 1.0)
       jmespath (~> 1.0)
-    aws-sdk-resources (2.1.14)
-      aws-sdk-core (= 2.1.14)
+    aws-sdk-resources (2.7.4)
+      aws-sdk-core (= 2.7.4)
+    aws-sigv4 (1.0.0)
     bcrypt (3.1.10)
     bcrypt (3.1.10-x64-mingw32)
     bcrypt-ruby (3.1.5)
@@ -194,8 +196,7 @@ GEM
       rake
       rdoc
       semver2
-    jmespath (1.0.2)
-      multi_json (~> 1.0)
+    jmespath (1.3.1)
     json (1.8.6)
     jwt (1.5.6)
     kgio (2.10.0)
diff --git a/app/models/post.rb b/app/models/post.rb
index efac68d31..343a1926c 100644
--- a/app/models/post.rb
+++ b/app/models/post.rb
@@ -105,11 +105,11 @@ class Post < ActiveRecord::Base
   end
 
   def file_url
-    if cdn_hosted?
-      Danbooru.config.danbooru_s3_base_url + "/#{file_path_prefix}#{md5}.#{file_ext}"
-    else
+    # if cdn_hosted?
+    #   Danbooru.config.danbooru_s3_base_url + "/#{file_path_prefix}#{md5}.#{file_ext}"
+    # else
       "/data/#{seo_tag_string}#{file_path_prefix}#{md5}.#{file_ext}"
-    end
+    # end
   end
 
   def large_file_url
diff --git a/lib/tasks/images.rake b/lib/tasks/images.rake
index 7c745ffd9..9d09cade0 100644
--- a/lib/tasks/images.rake
+++ b/lib/tasks/images.rake
@@ -1,23 +1,95 @@
 require 'danbooru_image_resizer/danbooru_image_resizer'
 
 namespace :images do
-  desc "Enable CDN"
-  task :enable_cdn, [:min_id, :max_id] => :environment do |t, args|
-    CurrentUser.scoped(User.admins.first, "127.0.0.1") do
-      credentials = Aws::Credentials.new(Danbooru.config.aws_access_key_id, Danbooru.config.aws_secret_access_key)
-      Aws.config.update({
-        region: "us-east-1",
-        credentials: credentials
-      })
-      client = Aws::S3::Client.new
-      bucket = Danbooru.config.aws_s3_bucket_name
-
-      Post.where("id >= ? and id <= ?", args[:min_id], args[:max_id]).find_each do |post|
-        post.cdn_hosted = true
-        post.save
-        key = File.basename(post.file_path)
-        client.copy_object(bucket: bucket, key: key, acl: "public-read", storage_class: "STANDARD", copy_source: "/#{bucket}/#{key}", metadata_directive: "COPY")
-        # client.put_object(bucket: bucket, key: key, body: body, content_md5: base64_md5, acl: "public-read", storage_class: "STANDARD")
+  desc "Upload large images to S3"
+  task :upload_large_to_s3, [:min_id, :max_id] => :environment do |t, args|
+    min_id = args[:min_id]
+    max_id = args[:max_id]
+
+    credentials = Aws::Credentials.new(Danbooru.config.aws_access_key_id, Danbooru.config.aws_secret_access_key)
+    Aws.config.update({
+      region: "us-west-2",
+      credentials: credentials
+    })
+    client = Aws::S3::Client.new
+    bucket = "danbooru-large"
+
+    Post.where("id >= ? and id <= ? and image_width > ?", min_id, max_id, Danbooru.config.large_image_width).find_each do |post|
+      if File.exist?(post.large_file_path)
+        key = File.basename(post.large_file_path)
+        body = File.open(post.large_file_path, "rb")
+        client.put_object(bucket: bucket, key: key, acl: "authenticated-read", body: body, content_md5: Digest::MD5.file(post.large_file_path).base64digest)
+      end
+    end
+  end
+
+  desc "Upload previews to S3"
+  task :upload_preview_to_s3, [:min_id, :max_id] => :environment do |t, args|
+    min_id = args[:min_id]
+    max_id = args[:max_id]
+
+    credentials = Aws::Credentials.new(Danbooru.config.aws_access_key_id, Danbooru.config.aws_secret_access_key)
+    Aws.config.update({
+      region: "us-west-2",
+      credentials: credentials
+    })
+    client = Aws::S3::Client.new
+    bucket = "danbooru-preview"
+
+    Post.where("id >= ? and id <= ?", min_id, max_id).find_each do |post|
+      if File.exist?(post.preview_file_path)
+        key = File.basename(post.preview_file_path)
+        body = File.open(post.preview_file_path, "rb")
+        client.put_object(bucket: bucket, key: key, acl: "authenticated-read", body: body, content_md5: Digest::MD5.file(post.preview_file_path).base64digest)
+      end
+    end
+  end
+
+  desc "Reset S3 storage class to STANDARD"
+  task :reset_s3, [:min_id, :max_id] => :environment do |t, args|
+    min_id = args[:min_id]
+    max_id = args[:max_id]
+
+    credentials = Aws::Credentials.new(Danbooru.config.aws_access_key_id, Danbooru.config.aws_secret_access_key)
+    Aws.config.update({
+      region: "us-east-1",
+      credentials: credentials
+    })
+    client = Aws::S3::Client.new
+    bucket = Danbooru.config.aws_s3_bucket_name
+
+    Post.where("id >= ? and id <= ?", min_id, max_id).find_each do |post|
+      key = File.basename(post.file_path)
+      client.copy_object(bucket: bucket, key: key, acl: "authenticated-read", storage_class: "STANDARD", copy_source: "/#{bucket}/#{key}", metadata_directive: "COPY")
+    end
+  end
+
+  task :restore_glacier, [:min_id, :max_id] => :environment do |t, args|
+    min_id = args[:min_id] # 10_001
+    max_id = args[:max_id] # 50_000
+
+    credentials = Aws::Credentials.new(Danbooru.config.aws_access_key_id, Danbooru.config.aws_secret_access_key)
+    Aws.config.update({
+      region: "us-east-1",
+      credentials: credentials
+    })
+    client = Aws::S3::Client.new
+    bucket = Danbooru.config.aws_s3_bucket_name
+
+    Post.where("id >= ? and id <= ?", min_id, max_id).find_each do |post|
+      key = File.basename(post.file_path)
+      begin
+        client.restore_object(
+          bucket: bucket,
+          key: key,
+          restore_request: {
+            days: 1,
+            glacier_job_parameters: {
+              tier: "Bulk"
+            }
+          }
+        )
+      rescue Aws::S3::Errors::InvalidObjectState, Aws::S3::Errors::NoSuchKey, Aws::S3::Errors::RestoreAlreadyInProgress
+        # skip: not archived, already being restored, or missing from the bucket
+      end
+    end
+  end
 end