25 changed files with 165 additions and 159 deletions
@ -1,24 +0,0 @@
|
||||
# Storage backend that serves files out of an in-memory ZIP archive.
class ArchiveStorageService < StorageService
  # Upper bound on the size of a single entry we will extract. 50MiB.
  MAX_SIZE = 50 * (1024**2) # 50MiB

  # archive_io: IO-like object holding the raw ZIP bytes.
  def initialize(archive_io)
    super()
    @archive = Zip::File.open_buffer(archive_io)
  end

  # Names of all dotted entries ("*.*") directly under +folder+.
  def list_files(folder)
    pattern = File.join(folder, "*.*")
    @archive.glob(pattern).map { |entry| entry.name }
  end

  # True when +folder+ contains at least one listable file.
  def folder_present?(folder)
    list_files(folder).any?
  end

  # Returns an input stream for the named entry.
  # Raises when the entry's uncompressed size exceeds MAX_SIZE.
  def get_file_io(file_name)
    zip_entry = @archive.get_entry(file_name)
    raise "File too large to be extracted" if zip_entry.size > MAX_SIZE

    zip_entry.get_input_stream
  end
end
@ -1,78 +0,0 @@
|
||||
# Storage backend over an S3 bucket whose credentials are looked up in the
# PaaS configuration under a named service instance.
class S3StorageService < StorageService
  attr_reader :configuration

  # paas_config_service: provides #config_present? and #s3_buckets.
  # paas_instance_name: name of the bucket entry to use (nil tolerated).
  def initialize(paas_config_service, paas_instance_name)
    super()
    @paas_config_service = paas_config_service
    @paas_instance_name = (paas_instance_name || "").to_sym
    @configuration = create_configuration
    @client = create_client
  end

  # All object keys under +folder+, walking every response page.
  def list_files(folder)
    pages = @client.list_objects_v2(bucket: @configuration.bucket_name, prefix: folder)
    pages.flat_map { |page| page.contents.map(&:key) }
  end

  # True when at least one object exists under +folder+.
  def folder_present?(folder)
    listing = @client.list_objects_v2(
      bucket: @configuration.bucket_name,
      prefix: folder,
      max_keys: 1,
    )
    listing.key_count == 1
  end

  # Returns the IO-like body of the object stored at +file_name+.
  def get_file_io(file_name)
    object = @client.get_object(bucket: @configuration.bucket_name, key: file_name)
    object.body
  end

  # Uploads +data+ to the bucket under the key +file_name+.
  def write_file(file_name, data)
    @client.put_object(
      bucket: @configuration.bucket_name,
      key: file_name,
      body: data,
    )
  end

  private

  # Builds a StorageConfiguration from the PaaS config for our instance.
  # Raises when no config is present or the instance name is unknown.
  def create_configuration
    raise "No PaaS configuration present" unless @paas_config_service.config_present?

    buckets = @paas_config_service.s3_buckets
    unless buckets.key?(@paas_instance_name)
      raise "#{@paas_instance_name} instance name could not be found"
    end

    StorageConfiguration.new(buckets[@paas_instance_name][:credentials])
  end

  # S3 client authenticated with the credentials from +configuration+.
  def create_client
    Aws::S3::Client.new(
      region: @configuration.region,
      credentials: Aws::Credentials.new(
        @configuration.access_key_id,
        @configuration.secret_access_key,
      ),
    )
  end
end
|
||||
# Plain value object holding the S3 connection settings extracted from a
# PaaS credentials hash.
class StorageConfiguration
  attr_reader :access_key_id, :secret_access_key, :bucket_name, :region

  # credentials: hash with :aws_access_key_id, :aws_secret_access_key,
  # :bucket_name and :aws_region keys; absent keys yield nil attributes.
  def initialize(credentials)
    @access_key_id = credentials[:aws_access_key_id]
    @secret_access_key = credentials[:aws_secret_access_key]
    @bucket_name = credentials[:bucket_name]
    @region = credentials[:aws_region]
  end

  # Value equality over all four attributes. Returns false for objects of
  # other classes instead of raising NoMethodError, as the previous
  # implementation did when +other+ lacked the attribute readers.
  def ==(other)
    other.is_a?(StorageConfiguration) &&
      access_key_id == other.access_key_id &&
      secret_access_key == other.secret_access_key &&
      bucket_name == other.bucket_name &&
      region == other.region
  end

  # Keep eql?/hash consistent with == so configurations work as Hash keys.
  alias eql? ==

  def hash
    [access_key_id, secret_access_key, bucket_name, region].hash
  end
end
@ -0,0 +1,26 @@
|
||||
module Storage
  # Storage backend reading files from an in-memory ZIP archive.
  class ArchiveService < StorageService
    # Largest entry size we are willing to extract. 50MiB.
    MAX_SIZE = 50 * (1024**2) # 50MiB

    # archive_io: IO-like object containing the raw ZIP bytes.
    def initialize(archive_io)
      super()
      @archive = Zip::File.open_buffer(archive_io)
    end

    # Names of all dotted entries ("*.*") directly below +folder+.
    def list_files(folder)
      glob_pattern = File.join(folder, "*.*")
      matches = @archive.glob(glob_pattern)
      matches.map { |match| match.name }
    end

    # True when +folder+ holds at least one listable file.
    def folder_present?(folder)
      list_files(folder).any?
    end

    # Input stream for the entry named +file_name+.
    # Raises when the entry's uncompressed size exceeds MAX_SIZE.
    def get_file_io(file_name)
      found = @archive.get_entry(file_name)
      raise "File too large to be extracted" if found.size > MAX_SIZE

      found.get_input_stream
    end
  end
end
@ -0,0 +1,80 @@
|
||||
module Storage
  # Storage backend over an S3 bucket; connection details come from the
  # PaaS configuration entry named by +paas_instance_name+.
  class S3Service < StorageService
    attr_reader :configuration

    # config_service: provides #config_present? and #s3_buckets.
    # paas_instance_name: bucket entry to use (nil tolerated).
    def initialize(config_service, paas_instance_name)
      super()
      @config_service = config_service
      @instance_name = (paas_instance_name || "").to_sym
      @configuration = create_configuration
      @client = create_client
    end

    # Every object key below +folder+, across all response pages.
    def list_files(folder)
      pages = @client.list_objects_v2(bucket: @configuration.bucket_name, prefix: folder)
      pages.flat_map { |page| page.contents.map(&:key) }
    end

    # True when at least one object exists below +folder+.
    def folder_present?(folder)
      listing = @client.list_objects_v2(
        bucket: @configuration.bucket_name,
        prefix: folder,
        max_keys: 1,
      )
      listing.key_count == 1
    end

    # IO-like body of the object stored at +file_name+.
    def get_file_io(file_name)
      fetched = @client.get_object(bucket: @configuration.bucket_name, key: file_name)
      fetched.body
    end

    # Uploads +data+ under the key +file_name+.
    def write_file(file_name, data)
      @client.put_object(
        bucket: @configuration.bucket_name,
        key: file_name,
        body: data,
      )
    end

    private

    # StorageConfiguration for our instance, from the PaaS config.
    # Raises when the config is missing or the instance name is unknown.
    def create_configuration
      raise "No PaaS configuration present" unless @config_service.config_present?

      buckets = @config_service.s3_buckets
      unless buckets.key?(@instance_name)
        raise "#{@instance_name} instance name could not be found"
      end

      StorageConfiguration.new(buckets[@instance_name][:credentials])
    end

    # S3 client authenticated with the credentials in +configuration+.
    def create_client
      Aws::S3::Client.new(
        region: @configuration.region,
        credentials: Aws::Credentials.new(
          @configuration.access_key_id,
          @configuration.secret_access_key,
        ),
      )
    end
  end
end
|
||||
module Storage
  # Plain value object holding the S3 connection settings extracted from a
  # PaaS credentials hash.
  class StorageConfiguration
    attr_reader :access_key_id, :secret_access_key, :bucket_name, :region

    # credentials: hash with :aws_access_key_id, :aws_secret_access_key,
    # :bucket_name and :aws_region keys; absent keys yield nil attributes.
    def initialize(credentials)
      @access_key_id = credentials[:aws_access_key_id]
      @secret_access_key = credentials[:aws_secret_access_key]
      @bucket_name = credentials[:bucket_name]
      @region = credentials[:aws_region]
    end

    # Value equality over all four attributes. Returns false for objects of
    # other classes instead of raising NoMethodError, as the previous
    # implementation did when +other+ lacked the attribute readers.
    def ==(other)
      other.is_a?(StorageConfiguration) &&
        access_key_id == other.access_key_id &&
        secret_access_key == other.secret_access_key &&
        bucket_name == other.bucket_name &&
        region == other.region
    end

    # Keep eql?/hash consistent with == so configurations work as Hash keys.
    alias eql? ==

    def hash
      [access_key_id, secret_access_key, bucket_name, region].hash
    end
  end
end
@ -0,0 +1,19 @@
|
||||
module Storage
  # Abstract interface shared by the storage backends (archive, S3, ...).
  # Every operation raises NotImplementedError until a subclass overrides it.
  class StorageService
    # List the file names available under +_folder+.
    def list_files(_folder) = raise(NotImplementedError)

    # Whether +_folder+ exists and contains at least one file.
    def folder_present?(_folder) = raise(NotImplementedError)

    # Return an IO-like object for reading +_file_name+.
    def get_file_io(_file_name) = raise(NotImplementedError)

    # Persist +_data+ under the name +_file_name+.
    def write_file(_file_name, _data) = raise(NotImplementedError)
  end
end
@ -1,17 +0,0 @@
|
||||
# Abstract interface for storage backends. Each operation raises
# NotImplementedError until a concrete subclass provides it.
class StorageService
  # List the file names available under +_folder+.
  def list_files(_folder) = raise(NotImplementedError)

  # Whether +_folder+ exists and contains at least one file.
  def folder_present?(_folder) = raise(NotImplementedError)

  # Return an IO-like object for reading +_file_name+.
  def get_file_io(_file_name) = raise(NotImplementedError)

  # Persist +_data+ under the name +_file_name+.
  def write_file(_file_name, _data) = raise(NotImplementedError)
end
@ -1,6 +1,6 @@
|
||||
require "rails_helper" |
||||
|
||||
RSpec.describe ArchiveStorageService do |
||||
RSpec.describe Storage::ArchiveService do |
||||
subject(:archive_service) { described_class.new(archive_content) } |
||||
|
||||
let(:compressed_folder) { "my_directory" } |
Loading…
Reference in new issue