forked from e621ng/e621ng

switch aws client libraries

parent 63f4ecf0f1
commit 2c8cacd50e
Gemfile (4 changed lines)
@@ -49,13 +49,11 @@ gem 'rubyzip', :require => "zip"
gem 'coinbase'
gem 'stripe'
gem 'twitter'
+gem 'aws-sdk'

# needed for looser jpeg header compat
gem 'ruby-imagespec', :require => "image_spec", :git => "https://github.com/r888888888/ruby-imagespec.git", :branch => "exif-fixes"

-# needed for rails 4.1.0 compat
-gem 'aws-s3', :require => "aws/s3"
-
group :production do
  gem 'newrelic_rpm'
  gem 'unicorn', :platforms => :ruby

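The model and script changes below build an Aws::S3::Client at each call site and hand it credentials explicitly. For reference, the same SDK also lets those defaults be set once at boot; the following is a minimal sketch, assuming the v2-style Aws namespace that the changed code uses (the Danbooru.config keys and the us-west-2 region are the ones already referenced in AmazonBackup below).

require 'aws-sdk'  # the gem added above; assumes a version that provides the Aws namespace

# Illustrative only: set process-wide defaults so clients built later need no arguments.
Aws.config.update(
  region: "us-west-2",
  credentials: Aws::Credentials.new(
    Danbooru.config.amazon_s3_access_key_id,
    Danbooru.config.amazon_s3_secret_access_key
  )
)

# Clients created afterwards pick up these defaults:
client = Aws::S3::Client.new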
Gemfile.lock (12 changed lines)
@@ -37,10 +37,10 @@ GEM
    addressable (2.3.6)
    arel (5.0.1.20140414130214)
    awesome_print (1.2.0)
-    aws-s3 (0.6.3)
-      builder
-      mime-types
-      xml-simple
+    aws-sdk (1.28.1)
+      json (~> 1.4)
+      nokogiri (>= 1.4.4)
+      uuidtools (~> 2.1)
    bcrypt (3.1.10)
    bcrypt (3.1.10-x64-mingw32)
    bcrypt-ruby (3.1.5)
@@ -290,6 +290,7 @@ GEM
      kgio (~> 2.6)
      rack
      raindrops (~> 0.7)
+    uuidtools (2.1.4)
    vcr (2.9.0)
    webmock (1.17.4)
      addressable (>= 2.2.7)
@@ -298,7 +299,6 @@ GEM
    whenever (0.9.2)
      activesupport (>= 2.3.4)
      chronic (>= 0.6.3)
-    xml-simple (1.1.5)

PLATFORMS
  ruby
@@ -306,7 +306,7 @@ PLATFORMS

DEPENDENCIES
  awesome_print
-  aws-s3
+  aws-sdk
  bcrypt-ruby
  byebug
  capistrano

@@ -14,25 +14,28 @@ class AmazonBackup < ActiveRecord::Base

  def self.execute
    last_id = AmazonBackup.last_id
+    credentials = Aws::Credentials.new(Danbooru.config.amazon_s3_access_key_id, Danbooru.config.amazon_s3_secret_access_key)
+    client = Aws::S3::Client.new(region: "us-west-2", credentials: credentials)
+    bucket = Danbooru.config.amazon_s3_bucket_name

    Post.where("id > ?", last_id).limit(1000).order("id").each do |post|
-      AWS::S3::Base.establish_connection!(
-        :access_key_id => Danbooru.config.amazon_s3_access_key_id,
-        :secret_access_key => Danbooru.config.amazon_s3_secret_access_key,
-        :server => "s3.amazonaws.com"
-      )
-
      if File.exists?(post.file_path)
        base64_md5 = Base64.encode64(Digest::MD5.digest(File.read(post.file_path)))
-        AWS::S3::S3Object.store(File.basename(post.file_path), open(post.file_path, "rb"), Danbooru.config.amazon_s3_bucket_name, "Content-MD5" => base64_md5)
+        key = File.basename(post.file_path)
+        body = open(post.file_path, "rb")
+        client.put_object(bucket: bucket, key: key, body: body, content_md5: base64_md5)
      end

      if post.has_preview? && File.exists?(post.preview_file_path)
-        AWS::S3::S3Object.store("preview/#{post.md5}.jpg", open(post.preview_file_path, "rb"), Danbooru.config.amazon_s3_bucket_name)
+        key = "preview/#{post.md5}.jpg"
+        body = open(post.preview_file_path, "rb")
+        client.put_object(bucket: bucket, key: key, body: body)
      end

      if File.exists?(post.large_file_path)
-        AWS::S3::S3Object.store("large/#{post.md5}.#{post.large_file_ext}", open(post.large_file_path, "rb"), Danbooru.config.amazon_s3_bucket_name)
+        key = "large/#{post.md5}.#{post.large_file_ext}"
+        body = open(post.large_file_path, "rb")
+        client.put_object(bucket: bucket, key: key, body: body)
      end

      AmazonBackup.update_id(post.id)
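Each branch of the new upload path repeats the same three steps: derive a key, open the file, call put_object. A compact helper in that spirit is sketched below; the helper name, environment variables, bucket, key, and path are hypothetical, not part of this commit. Digest::MD5.file avoids loading the whole file into memory (the diff reads each file twice, once via File.read for the digest and once via open for the body), and the File.open block closes the handle that a bare open(..., "rb") leaves to the garbage collector.

require 'aws-sdk'      # assumes the v2-style Aws namespace used above
require 'digest/md5'

# Hypothetical helper mirroring the put_object pattern in AmazonBackup.execute:
# stream a local file to S3 and send its base64 MD5 so S3 verifies the upload.
def upload_with_md5(client, bucket, key, path)
  # Same value as Base64.encode64(Digest::MD5.digest(...)), minus the trailing newline.
  md5 = Digest::MD5.file(path).base64digest
  File.open(path, "rb") do |io|
    client.put_object(bucket: bucket, key: key, body: io, content_md5: md5)
  end
end

# Illustrative usage; the credentials source and all names are placeholders.
client = Aws::S3::Client.new(
  region: "us-west-2",
  credentials: Aws::Credentials.new(ENV["S3_ACCESS_KEY"], ENV["S3_SECRET_KEY"])
)
upload_with_md5(client, "example-bucket", "preview/0123abcd.jpg", "/tmp/0123abcd.jpg")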
@@ -1,7 +1,7 @@
#!/usr/bin/env ruby

require 'rubygems'
-require 'aws/s3'
+require 'aws-sdk/s3'

MAX_BACKUPS = 30

@@ -19,14 +19,15 @@ end

access_key = open("#{home}/.s3/access_key").read.strip
secret_access_key = open("#{home}/.s3/secret_access_key").read.strip
+credentials = Aws::Credentials.new(access_key, secret_access_key)
+client = Aws::S3::Client.new(region: "us-west-2", credentials: credentials)
+bucket = "danbooru-backup"

-AWS::S3::Base.establish_connection!(:access_key_id => access_key, :secret_access_key => secret_access_key, :server => "s3.amazonaws.com")
-
-current_backups = AWS::S3::Bucket.find("danbooru-backup").objects.map {|x| x.key}.select {|x| x =~ /^db-/}.sort.reverse
+current_backups = client.list_objects(bucket: bucket).contents.map {|x| x.key}.select {|x| x =~ /^db-/}.sort.reverse

if current_backups.size > MAX_BACKUPS
  current_backups[MAX_BACKUPS..-1].each do |old_backup|
-    AWS::S3::S3Object.delete(old_backup, "danbooru-backup")
+    client.delete_object(bucket: bucket, key: old_backup)
    puts "Deleted old backup #{old_backup}"
  end
end

@@ -37,7 +38,7 @@ filename = data.mtime.strftime("db-%Y-%m-%d-%H-%M")
tries = 0

begin
-  AWS::S3::S3Object.store(filename, data, "danbooru-backup", :access => :private)
+  client.put_object(bucket: bucket, key: filename, body: data, acl: "private")
rescue Errno::EPIPE
  tries += 1
  if tries > 3
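The pruning step above still lists every key in the bucket and filters client-side. As a hedged refinement rather than what the commit ships, list_objects accepts a prefix, and Array#drop makes the size guard unnecessary; the sketch reuses access_key, secret_access_key, and MAX_BACKUPS as defined earlier in the script. A single list_objects call returns at most 1,000 keys, comfortably above MAX_BACKUPS = 30, so no pagination loop is needed here.

# Sketch only: prune old db-* backups using a server-side prefix filter.
credentials = Aws::Credentials.new(access_key, secret_access_key)
client = Aws::S3::Client.new(region: "us-west-2", credentials: credentials)
bucket = "danbooru-backup"

# Only keys starting with "db-" come back; newest first after sort.reverse.
backups = client.list_objects(bucket: bucket, prefix: "db-").contents.map(&:key).sort.reverse

# drop(MAX_BACKUPS) is empty when 30 or fewer backups exist, so no size check is needed.
backups.drop(MAX_BACKUPS).each do |old_backup|
  client.delete_object(bucket: bucket, key: old_backup)
  puts "Deleted old backup #{old_backup}"
end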