Skip to content

S3: slim down service implementation #1

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Open
wants to merge 1 commit into
base: master
Choose a base branch
from
Open
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
60 changes: 24 additions & 36 deletions lib/active_storage/service/s3_service.rb
Original file line number Diff line number Diff line change
Expand Up @@ -4,84 +4,72 @@
# Active Storage service backed by Amazon S3, implemented directly against
# Aws::S3::Client (rather than the higher-level Aws::S3::Resource) so the
# service only depends on the thin client API.
class ActiveStorage::Service::S3Service < ActiveStorage::Service
  attr_reader :client, :bucket

  # bucket:         name of the S3 bucket to store blobs in.
  # client:         optional preconfigured Aws::S3::Client (useful for tests);
  #                 when nil, a client is built from the remaining options.
  # client_options: forwarded verbatim to Aws::S3::Client.new
  #                 (e.g. access_key_id:, secret_access_key:, region:).
  def initialize(bucket:, client: nil, **client_options)
    @bucket = bucket
    @client = client || Aws::S3::Client.new(client_options)
  end

  # Uploads +io+ under +key+. When a +checksum+ (Base64 MD5) is given, S3
  # verifies it server-side; a mismatch surfaces as BadDigest, which we
  # translate into ActiveStorage::IntegrityError for callers.
  def upload(key, io, checksum: nil)
    instrument :upload, key, checksum: checksum do
      begin
        client.put_object bucket: bucket, key: key, body: io, content_md5: checksum
      rescue Aws::S3::Errors::BadDigest
        raise ActiveStorage::IntegrityError
      end
    end
  end

  # With a block: streams the object's body in chunks, yielding each chunk
  # (Aws::S3::Client#get_object yields the response body to the block).
  # Without a block: returns the entire object as a binary-encoded String;
  # passing a String as response_target makes the SDK append the body to it.
  def download(key, &block)
    if block_given?
      instrument :streaming_download, key do
        client.get_object bucket: bucket, key: key, &block
      end
    else
      instrument :download, key do
        "".b.tap do |data|
          client.get_object bucket: bucket, key: key, response_target: data
        end
      end
    end
  end

  # Deletes the object stored under +key+. S3 deletes are idempotent, so a
  # missing key does not raise.
  def delete(key)
    instrument :delete, key do
      client.delete_object bucket: bucket, key: key
    end
  end

  # Returns true if an object exists under +key+, via a HEAD request.
  # NOTE(review): HEAD responses carry no XML body, so the SDK raises
  # Errors::NotFound (not NoSuchKey) for a missing key — rescue both to be
  # safe; confirm against aws-sdk-s3 behavior. (The original diff also had a
  # typo here: "NoSuckKey".)
  def exist?(key)
    instrument :exist, key do |payload|
      payload[:exist] =
        begin
          client.head_object bucket: bucket, key: key
        rescue Aws::S3::Errors::NoSuchKey, Aws::S3::Errors::NotFound
          false
        else
          true
        end
    end
  end

  # Returns a presigned GET URL for +key+, valid for +expires_in+ seconds,
  # forcing the given Content-Disposition and download filename.
  def url(key, expires_in:, disposition:, filename:)
    instrument :url, key do |payload|
      payload[:url] = presigner.presigned_url :get_object,
        bucket: bucket, key: key, expires_in: expires_in,
        response_content_disposition: "#{disposition}; filename=\"#{filename}\""
    end
  end

  # Returns a presigned PUT URL clients can upload to directly, pinned to the
  # exact content type and length so the upload cannot be repurposed.
  def url_for_direct_upload(key, expires_in:, content_type:, content_length:)
    instrument :url, key do |payload|
      payload[:url] = presigner.presigned_url :put_object,
        bucket: bucket, key: key, expires_in: expires_in,
        content_type: content_type, content_length: content_length
    end
  end

  private
    # Memoized presigner sharing this service's client (and thus its
    # credentials and region configuration).
    def presigner
      @presigner ||= Aws::S3::Presigner.new client: client
    end
end