forked from e621ng/e621ng
[Misc] Replace httparty with faraday
We already pull in faraday from opensearch-ruby. httparty hasn't had a release in a while, and it prints warnings on Ruby 3.3.
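For reviewers unfamiliar with the API differences, a minimal sketch of how call sites translate (the URL and header values below are illustrative, not from this diff):

```ruby
require "faraday"
require "json"

# httparty (before): HTTParty.get returned a response exposing #code and an
# auto-parsed #parsed_response.
#
# faraday (after): a connection object issues requests; responses expose
# #status and a raw #body string that must be parsed explicitly.
conn = Faraday.new(headers: { user_agent: "demo/1.0" })
response = conn.get("https://example.com/api.json")
response.status           #=> 200
JSON.parse(response.body) #=> Hash
```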
parent 58f9a18915
commit d01c8c131d
Gemfile (6 changed lines)
@@ -17,10 +17,8 @@ gem 'responders'
 gem 'dtext_rb', :git => "https://github.com/e621ng/dtext_rb.git", branch: "master", :require => "dtext"
 gem 'bootsnap'
 gem 'addressable'
-gem 'httparty'
 gem 'recaptcha', require: "recaptcha/rails"
 gem 'webpacker', '>= 4.0.x'
-gem 'retriable'
 gem 'sidekiq', '~> 7.0'
 gem 'marcel'
 # bookmarks for later, if they are needed
@@ -41,6 +39,10 @@ gem 'opensearch-ruby'
 
 gem 'mailgun-ruby'
 
+gem "faraday"
+gem "faraday-follow_redirects"
+gem "faraday-retry"
+
 group :production do
   gem 'unicorn'
   gem 'unicorn-worker-killer'
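The two companion gems are Faraday middleware; they have to be registered on each connection that uses them. A minimal sketch (gem names from this diff, the URL is illustrative):

```ruby
require "faraday"
require "faraday/follow_redirects"
require "faraday/retry"

conn = Faraday.new("https://example.com") do |f|
  f.response :follow_redirects # provided by faraday-follow_redirects
  f.request  :retry, max: 3    # provided by faraday-retry
end
```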
Gemfile.lock (14 changed lines)
@@ -146,8 +146,12 @@ GEM
       railties (>= 5.0.0)
     faraday (2.9.0)
       faraday-net_http (>= 2.0, < 3.2)
+    faraday-follow_redirects (0.3.0)
+      faraday (>= 1, < 3)
     faraday-net_http (3.1.0)
       net-http
+    faraday-retry (2.2.1)
+      faraday (~> 2.0)
     ffi (1.16.3)
     get_process_mem (0.2.7)
       ffi (~> 1.0)
@@ -157,9 +161,6 @@ GEM
     http-accept (1.7.0)
     http-cookie (1.0.5)
       domain_name (~> 0.5)
-    httparty (0.21.0)
-      mini_mime (>= 1.0.0)
-      multi_xml (>= 0.5.2)
     i18n (1.14.1)
       concurrent-ruby (~> 1.0)
     io-console (0.7.2)
@@ -197,7 +198,6 @@ GEM
       ruby2_keywords (>= 0.0.5)
     msgpack (1.7.2)
     multi_json (1.15.0)
-    multi_xml (0.6.0)
     mutex_m (0.2.0)
     net-http (0.4.1)
       uri
@@ -295,7 +295,6 @@ GEM
       http-cookie (>= 1.0.2, < 2.0)
       mime-types (>= 1.16, < 4.0)
       netrc (~> 0.8)
-    retriable (3.1.2)
     rexml (3.2.6)
     rubocop (1.58.0)
       json (~> 2.3)
@@ -397,7 +396,9 @@ DEPENDENCIES
   draper
   dtext_rb!
   factory_bot_rails
-  httparty
+  faraday
+  faraday-follow_redirects
+  faraday-retry
   listen
   mailgun-ruby
   marcel
@@ -411,7 +412,6 @@ DEPENDENCIES
   redis
   request_store
   responders
-  retriable
   rubocop
   rubocop-erb
   rubocop-rails
@@ -6,11 +6,11 @@ module CloudflareService
   end
 
   def self.ips
-    text, code = Cache.fetch("cloudflare_ips", expires_in: 24.hours) do
-      resp = HTTParty.get(endpoint, Danbooru.config.httparty_options)
-      [resp.body, resp.code]
+    text, status = Cache.fetch("cloudflare_ips", expires_in: 24.hours) do
+      resp = Faraday.new(Danbooru.config.faraday_options).get(endpoint)
+      [resp.body, resp.status]
     end
-    return [] if code != 200
+    return [] if status != 200
 
     json = JSON.parse(text, symbolize_names: true)
     ips = json[:result][:ipv4_cidrs] + json[:result][:ipv6_cidrs]
@@ -17,16 +17,8 @@ module DiscordReport
   end
 
   def post_webhook
-    HTTParty.post(
-      webhook_url,
-      body: {
-        content: report,
-        flags: 4096,
-      }.to_json,
-      headers: {
-        "Content-Type" => "application/json",
-      },
-    )
+    conn = Faraday.new(Danbooru.config.faraday_options)
+    conn.post(webhook_url, { content: report, flags: 4096 }.to_json, { content_type: "application/json" })
  end
 
   def formatted_number(input)
@@ -5,8 +5,6 @@ module Downloads
     include ActiveModel::Validations
     class Error < Exception ; end
 
-    RETRIABLE_ERRORS = [Errno::ECONNRESET, Errno::ETIMEDOUT, Errno::EIO, Errno::EHOSTUNREACH, Errno::ECONNREFUSED, Timeout::Error, IOError]
-
     attr_reader :url
 
     validate :validate_url
@@ -22,40 +20,34 @@ module Downloads
       validate!
     end
 
-    def download!(tries: 3, **)
-      Retriable.retriable(on: RETRIABLE_ERRORS, tries: tries, base_interval: 0) do
-        http_get_streaming(uncached_url, **)
+    def download!(max_size: Danbooru.config.max_file_size)
+      file = Tempfile.new(binmode: true)
+      conn = Faraday.new(Danbooru.config.faraday_options) do |f|
+        f.response :follow_redirects, callback: ->(_old_env, new_env) { validate_uri_allowed!(new_env.url) }
+        f.request :retry, max: 3, retry_block: ->(*) { file = Tempfile.new(binmode: true) }
       end
+
+      res = conn.get(uncached_url, nil, strategy.headers) do |req|
+        req.options.on_data = ->(chunk, overall_received_bytes, env) do
+          next if [301, 302].include?(env.status)
+
+          raise Error, "File is too large (max size: #{max_size})" if overall_received_bytes > max_size
+          file.write(chunk)
+        end
+      end
+      raise Error, "HTTP error code: #{res.status} #{res.reason_phrase}" unless res.success?
+
+      file.rewind
+      file
     end
 
     def validate_url
       errors.add(:base, "URL must not be blank") if url.blank?
       errors.add(:base, "'#{url}' is not a valid url") if !url.host.present?
       errors.add(:base, "'#{url}' is not a valid url. Did you mean 'http://#{url}'?") if !url.scheme.in?(%w[http https])
-      valid, reason = UploadWhitelist.is_whitelisted?(url)
-      errors.add(:base, "'#{url}' is not whitelisted and can't be direct downloaded: #{reason}") if !valid
+      validate_uri_allowed!(url)
     end
 
-    def http_get_streaming(url, file: Tempfile.new(binmode: true), max_size: Danbooru.config.max_file_size)
-      size = 0
-
-      res = HTTParty.get(url, httparty_options) do |chunk|
-        next if [301, 302].include?(chunk.code)
-
-        size += chunk.size
-        raise Error.new("File is too large (max size: #{max_size})") if size > max_size && max_size > 0
-
-        file.write(chunk)
-      end
-
-      if res.success?
-        file.rewind
-        return file
-      else
-        raise Error.new("HTTP error code: #{res.code} #{res.message}")
-      end
-    end # def
-
     # Prevent Cloudflare from potentially mangling the image. See issue #3528.
     def uncached_url
       return file_url unless is_cloudflare?(file_url)
@@ -73,42 +65,21 @@ module Downloads
       @strategy ||= Sources::Strategies.find(url.to_s)
     end
 
-    def httparty_options
-      {
-        timeout: 10,
-        stream_body: true,
-        headers: strategy.headers,
-        connection_adapter: ValidatingConnectionAdapter,
-      }.deep_merge(Danbooru.config.httparty_options)
-    end
-
     def is_cloudflare?(url)
       ip_addr = IPAddr.new(Resolv.getaddress(url.hostname))
       CloudflareService.ips.any? { |subnet| subnet.include?(ip_addr) }
     end
-  end
 
-  # Hook into HTTParty to validate the IP before following redirects.
-  # https://www.rubydoc.info/github/jnunemaker/httparty/HTTParty/ConnectionAdapter
-  class ValidatingConnectionAdapter < HTTParty::ConnectionAdapter
-    def self.call(uri, options)
+    def validate_uri_allowed!(uri)
       ip_addr = IPAddr.new(Resolv.getaddress(uri.hostname))
 
-      if ip_blocked?(ip_addr)
+      if ip_addr.private? || ip_addr.loopback? || ip_addr.link_local?
         raise Downloads::File::Error, "Downloads from #{ip_addr} are not allowed"
       end
 
       # Check whitelist here again, in case of open redirect vulnerabilities
-      valid, _reason = UploadWhitelist.is_whitelisted?(Addressable::URI.parse(uri))
+      valid, _reason = UploadWhitelist.is_whitelisted?(uri)
       unless valid
         raise Downloads::File::Error, "'#{uri}' is not whitelisted and can't be direct downloaded"
       end
-
-      super(uri, options)
     end
-
-    def self.ip_blocked?(ip_addr)
-      ip_addr.private? || ip_addr.loopback? || ip_addr.link_local?
-    end
   end
 end
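The streaming logic moves from HTTParty's block form to Faraday's on_data request option, which yields each chunk together with the cumulative byte count, so the size cap no longer needs a hand-rolled counter. A minimal sketch, assuming the adapter versions in this lockfile (URL and limit are illustrative):

```ruby
require "faraday"

MAX_BYTES = 10 * 1024 * 1024 # illustrative cap

conn = Faraday.new
conn.get("https://example.com/large-file") do |req|
  # chunk: the next piece of the body; received_bytes: running total so far
  req.options.on_data = ->(chunk, received_bytes, _env) do
    raise "too large" if received_bytes > MAX_BYTES
    $stdout.write(chunk)
  end
end
```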
@@ -16,10 +16,9 @@ module IqdbProxy
     end
 
     def make_request(path, request_type, params = {})
-      url = URI.parse(endpoint)
-      url.path = path
-      HTTParty.send(request_type, url, { body: params.to_json, headers: { "Content-Type" => "application/json" } })
-    rescue Errno::ECONNREFUSED, Errno::EADDRNOTAVAIL, Errno::EHOSTUNREACH
+      conn = Faraday.new(Danbooru.config.faraday_options)
+      conn.send(request_type, endpoint + path, params.to_json, { content_type: "application/json" })
+    rescue Faraday::Error
       raise Error, "This service is temporarily unavailable. Please try again later."
     end
 
@@ -30,12 +29,12 @@ module IqdbProxy
       raise Error, "failed to generate thumb for #{post.id}" unless thumb
 
       response = make_request("/images/#{post.id}", :post, get_channels_data(thumb))
-      raise Error, "iqdb request failed" if response.code != 200
+      raise Error, "iqdb request failed" if response.status != 200
     end
 
     def remove_post(post_id)
       response = make_request("/images/#{post_id}", :delete)
-      raise Error, "iqdb request failed" if response.code != 200
+      raise Error, "iqdb request failed" if response.status != 200
     end
 
     def query_url(image_url, score_cutoff)
@@ -56,16 +55,16 @@ module IqdbProxy
       return [] unless thumb
 
       response = make_request("/query", :post, get_channels_data(thumb))
-      return [] if response.code != 200
+      return [] if response.status != 200
 
-      process_iqdb_result(response.parsed_response, score_cutoff)
+      process_iqdb_result(JSON.parse(response.body), score_cutoff)
     end
 
     def query_hash(hash, score_cutoff)
       response = make_request "/query", :post, { hash: hash }
-      return [] if response.code != 200
+      return [] if response.status != 200
 
-      process_iqdb_result(response.parsed_response, score_cutoff)
+      process_iqdb_result(JSON.parse(response.body), score_cutoff)
     end
 
     def process_iqdb_result(json, score_cutoff)
@@ -61,7 +61,7 @@ module Sources
     end
 
     def headers
-      { "Referer" => "https://www.pixiv.net"}
+      { referer: "https://www.pixiv.net" }
     end
   end
 end
@@ -70,7 +70,7 @@ class UploadWhitelist < ApplicationRecord
         return [x.allowed, x.reason]
       end
     end
-    [false, "#{url.domain} not in whitelist"]
+    [false, "#{url.host} not in whitelist"]
   end
 
   extend SearchMethods
@@ -572,17 +572,18 @@ module Danbooru
   # services will fail if you don't set a valid User-Agent.
   def http_headers
     {
-      "User-Agent" => "#{Danbooru.config.safe_app_name}/#{Danbooru.config.version}",
+      user_agent: "#{safe_app_name}/#{version}",
     }
   end
 
-  def httparty_options
-    # proxy example:
-    # {http_proxyaddr: "", http_proxyport: "", http_proxyuser: nil, http_proxypass: nil}
+  # https://lostisland.github.io/faraday/#/customization/connection-options
+  def faraday_options
     {
-      timeout: 10,
-      open_timout: 5,
-      headers: Danbooru.config.http_headers,
+      request: {
+        timeout: 10,
+        open_timeout: 5,
+      },
+      headers: http_headers,
     }
   end
 
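Note the shape change: HTTParty took timeout and open_timeout at the top level, while Faraday's connection options nest them under :request (per the linked customization docs). A minimal sketch of consuming such a hash:

```ruby
require "faraday"

options = {
  request: { timeout: 10, open_timeout: 5 },
  headers: { user_agent: "demo/1.0" },
}
conn = Faraday.new(options)
# conn.get(...) now applies both timeouts and the default headers.
```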
@@ -33,9 +33,7 @@ ForumCategory.find_or_create_by!(name: "Tag Alias and Implication Suggestions")
 end
 
 def api_request(path)
-  response = HTTParty.get("https://e621.net#{path}", {
-    headers: { "User-Agent" => "e621ng/seeding" },
-  })
+  response = Faraday.get("https://e621.net#{path}", nil, user_agent: "e621ng/seeding")
   JSON.parse(response.body)
 end
 
@@ -21,11 +21,11 @@ module Downloads
     end
 
     should "not follow redirects to non-whitelisted domains" do
-      stub_request(:get, "https://example.com/file.png").to_return(status: 301, headers: { location: "https://e621.net" })
+      stub_request(:get, "https://example.com/file.png").to_return(status: 301, headers: { location: "https://e621.net/abc" })
       error = assert_raises(Downloads::File::Error) do
         Downloads::File.new("https://example.com/file.png").download!
       end
-      assert_match("'https://e621.net/' is not whitelisted", error.message)
+      assert_match("'https://e621.net/abc' is not whitelisted", error.message)
     end
   end
 
@@ -71,15 +71,19 @@ module Downloads
 
     context "that fails" do
       should "retry three times before giving up" do
-        download = Downloads::File.new("https://example.com")
-        HTTParty.expects(:get).times(3).raises(Errno::ETIMEDOUT)
-        assert_raises(Errno::ETIMEDOUT) { download.download! }
+        download = Downloads::File.new("https://example.com/1")
+        stub_request(:get, "https://example.com/1").to_raise(Errno::ETIMEDOUT).times(2).then.to_return(body: "foo")
+        assert_equal("foo", download.download!.read)
+
+        download = Downloads::File.new("https://example.com/2")
+        stub_request(:get, "https://example.com/2").to_raise(Errno::ETIMEDOUT).times(3)
+        assert_raises(Faraday::Error) { download.download! }
       end
 
       should "return an uncorrupted file on the second try" do
         source = "https://example.com"
         download = Downloads::File.new(source)
-        stub_request(:get, source).to_raise(IOError).then.to_return(body: "abc")
+        stub_request(:get, source).to_raise(Errno::ETIMEDOUT).then.to_return(body: "abc")
 
         tempfile = download.download!
         assert_equal("abc", tempfile.read)
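The assertion changes from Errno::ETIMEDOUT to Faraday::Error because Faraday's adapter wraps low-level socket and timeout exceptions in its own hierarchy. A minimal sketch of what callers now rescue (the unroutable address is chosen purely to force a failure):

```ruby
require "faraday"

begin
  Faraday.new(request: { timeout: 1, open_timeout: 1 }).get("http://10.255.255.1/")
rescue Faraday::Error => e
  # Faraday::TimeoutError and Faraday::ConnectionFailed are subclasses of Faraday::Error
  puts "request failed: #{e.class}"
end
```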