diff --git a/app/services/exports/case_log_export_service.rb b/app/services/exports/case_log_export_service.rb
index 959adc79f..54f0b96ed 100644
--- a/app/services/exports/case_log_export_service.rb
+++ b/app/services/exports/case_log_export_service.rb
@@ -15,7 +15,14 @@ module Exports
@logger = logger
end
- def export_case_logs(full_update: false)
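+ # Writes a full CSV extract of case logs to the storage service, named with the current date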
+ def export_csv_case_logs
+ current_time = Time.zone.now
+ case_logs = retrieve_case_logs(current_time, true)
+ csv_io = build_export_csv(case_logs)
+ @storage_service.write_file("export_#{current_time.strftime('%Y_%m_%d')}.csv", csv_io)
+ end
+
+ def export_xml_case_logs(full_update: false)
start_time = Time.zone.now
case_logs = retrieve_case_logs(start_time, full_update)
export = build_export_run(start_time, full_update)
@@ -26,12 +33,6 @@ module Exports
export.save!
end
- def is_omitted_field?(field_name)
- omitted_attrs = %w[ethnic_group]
- pattern_age = /age\d_known/
- field_name.starts_with?("details_known_") || pattern_age.match(field_name) || omitted_attrs.include?(field_name) ? true : false
- end
-
private
def get_daily_run_number
@@ -157,18 +158,56 @@ module Exports
xml_doc_to_temp_file(doc)
end
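+ # Mutates the attribute hash in place so values match the CDS export conventions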
+ def apply_cds_transformation!(attribute_hash)
+ # CDS expects the old CORE form id in "form" and the offset log id in "newform"
+ attribute_hash["form"] = attribute_hash["old_form_id"]
+ attribute_hash["newform"] = attribute_hash["id"] + LOG_ID_OFFSET
+ # Ages that were refused / not known are exported as -9
+ (1..8).each do |index|
+ attribute_hash["age#{index}"] = -9 if attribute_hash["age#{index}_known"] == 1
+ end
+ end
+
+ def filter_keys!(attributes)
+ attributes.reject! { |attribute| is_omitted_field?(attribute) }
+ end
+
+ def is_omitted_field?(field_name)
+ omitted_attrs = %w[id old_form_id old_id ethnic_group]
+ pattern_age = /age\d_known/
+ field_name.starts_with?("details_known_") || pattern_age.match?(field_name) || omitted_attrs.include?(field_name)
+ end
+
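+ # Generates CSV content for the given logs (header row built from the first log's filtered attributes) and returns it as a StringIO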
+ def build_export_csv(case_logs)
+ csv_string = CSV.generate do |csv|
+ attribute_keys = nil
+ case_logs.each do |case_log|
+ attribute_hash = case_log.attributes_before_type_cast
+ apply_cds_transformation!(attribute_hash)
+ if attribute_keys.nil?
+ attribute_keys = attribute_hash.keys
+ filter_keys!(attribute_keys)
+ csv << attribute_keys
+ end
+ csv << attribute_keys.map { |attribute_key| attribute_hash[attribute_key] }
+ end
+ end
+
+ StringIO.new(csv_string)
+ end
+
def build_export_xml(case_logs)
doc = Nokogiri::XML("")
case_logs.each do |case_log|
+ attribute_hash = case_log.attributes_before_type_cast
+ apply_cds_transformation!(attribute_hash)
form = doc.create_element("form")
doc.at("forms") << form
- case_log.attributes.each do |key, _|
+ attribute_hash.each do |key, value|
if is_omitted_field?(key)
next
else
- value = case_log.read_attribute_before_type_cast(key)
- value += LOG_ID_OFFSET if key == "id"
form << doc.create_element(key, value)
end
end
diff --git a/lib/tasks/data_export.rake b/lib/tasks/data_export.rake
index 7e4f766c2..36a3a658c 100644
--- a/lib/tasks/data_export.rake
+++ b/lib/tasks/data_export.rake
@@ -1,8 +1,17 @@
namespace :core do
desc "Export data XMLs for import into Central Data System (CDS)"
- task :data_export, %i[full_update] => :environment do |_task, args|
- storage_service = StorageService.new(PaasConfigurationService.new, ENV["EXPORT_PAAS_INSTANCE"])
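+ # Pass "CSV" as the first task argument for a CSV export; any other value (or no argument) runs the XML export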
+ task :data_export, %i[format full_update] => :environment do |_task, args|
+ format = args[:format]
full_update = args[:full_update].present? && args[:full_update] == "true"
- Exports::CaseLogExportService.new(storage_service).export_case_logs(full_update:)
+
+ storage_service = StorageService.new(PaasConfigurationService.new, ENV["EXPORT_PAAS_INSTANCE"])
+ export_service = Exports::CaseLogExportService.new(storage_service)
+
+ if format == "CSV"
+ export_service.export_csv_case_logs
+ else
+ export_service.export_xml_case_logs(full_update:)
+ end
end
end
diff --git a/spec/fixtures/exports/case_logs.csv b/spec/fixtures/exports/case_logs.csv
new file mode 100644
index 000000000..5817cf75f
--- /dev/null
+++ b/spec/fixtures/exports/case_logs.csv
@@ -0,0 +1,2 @@
+status,created_at,updated_at,tenant_code,age1,sex1,ethnic,national,prevten,ecstat1,hhmemb,age2,sex2,ecstat2,age3,sex3,ecstat3,age4,sex4,ecstat4,age5,sex5,ecstat5,age6,sex6,ecstat6,age7,sex7,ecstat7,age8,sex8,ecstat8,homeless,underoccupation_benefitcap,leftreg,reservist,illness,preg_occ,tenancy_code,startertenancy,tenancylength,tenancy,ppostcode_full,rsnvac,unittype_gn,beds,offered,wchair,earnings,incfreq,benefits,period,layear,waityear,postcode_full,reasonpref,cbl,chr,cap,reasonother,housingneeds_a,housingneeds_b,housingneeds_c,housingneeds_f,housingneeds_g,housingneeds_h,illness_type_1,illness_type_2,illness_type_3,illness_type_4,illness_type_8,illness_type_5,illness_type_6,illness_type_7,illness_type_9,illness_type_10,rp_homeless,rp_insan_unsat,rp_medwel,rp_hardship,rp_dontknow,tenancyother,net_income_value_check,property_owner_organisation,property_manager_organisation,sale_or_letting,irproduct_other,purchaser_code,reason,propcode,majorrepairs,la,prevloc,hb,hbrentshortfall,property_relet,mrcdate,incref,sale_completion_date,startdate,armedforces,first_time_property_let_as_social_housing,unitletas,builtype,voiddate,owning_organisation_id,managing_organisation_id,renttype,needstype,lettype,postcode_known,is_la_inferred,totchild,totelder,totadult,net_income_known,nocharge,is_carehome,household_charge,referral,brent,scharge,pscharge,supcharg,tcharge,tshortfall,chcharge,declaration,previous_postcode_known,previous_la_known,is_previous_la_inferred,ethnic_other,letting_allocation_unknown,rent_type,has_benefits,renewal,wrent,wscharge,wpschrge,wsupchrg,wtcharge,wtshortfall,refused,housingneeds,wchchrg,newprop,relat2,relat3,relat4,relat5,relat6,relat7,relat8,rent_value_check,lar,irproduct,joint,created_by_id,illness_type_0,tshortfall_known,shelteredaccom,retirement_value_check,form,newform
+2,2022-02-08 16:52:15 UTC,2022-02-08 16:52:15 UTC,BZ737,35,F,2,4,6,0,2,32,M,6,,,,,,,,,,,,,,,,,,,1,0,1,0,1,2,BZ757,0,5,1,SE26RT,6,7,3,2,1,68,1,1,2,2,1,NW15TY,1,1,1,0,,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,,,Test,Test,,,798794,4,123,1,E09000003,E07000105,6,1,0,2020-05-05 10:36:49 UTC,0,,2022-02-02 10:36:49 UTC,1,,2,1,2019-11-03 00:00:00 UTC,{owning_org_id},{managing_org_id},2,1,7,1,false,0,0,2,1,0,0,,,200.0,50.0,40.0,35.0,325.0,12.0,,1,1,1,false,,,1,1,0,100.0,25.0,20.0,17.5,162.5,6.0,0,1,,2,P,,,,,,,,,,,{created_by_id},,0,0,,,{id}
diff --git a/spec/fixtures/exports/case_logs.xml b/spec/fixtures/exports/case_logs.xml
index e46027264..e2110fefd 100644
--- a/spec/fixtures/exports/case_logs.xml
+++ b/spec/fixtures/exports/case_logs.xml
@@ -1,7 +1,6 @@
+ {id}
1
diff --git a/spec/lib/tasks/data_export_spec.rb b/spec/lib/tasks/data_export_spec.rb
index bec366c6c..69c70234c 100644
--- a/spec/lib/tasks/data_export_spec.rb
+++ b/spec/lib/tasks/data_export_spec.rb
@@ -21,13 +21,23 @@ describe "rake core:data_export", type: task do
allow(ENV).to receive(:[]).with("EXPORT_PAAS_INSTANCE").and_return(paas_instance)
end
- context "when exporting case logs" do
- it "starts the export process" do
+ context "when exporting case logs with no parameters" do
+ it "starts the XML export process" do
expect(StorageService).to receive(:new).with(paas_config_service, paas_instance)
expect(Exports::CaseLogExportService).to receive(:new).with(storage_service)
- expect(export_service).to receive(:export_case_logs)
+ expect(export_service).to receive(:export_xml_case_logs)
task.invoke
end
end
+
+ context "when exporting case logs with CSV format" do
+ it "starts the CSV export process" do
+ expect(StorageService).to receive(:new).with(paas_config_service, paas_instance)
+ expect(Exports::CaseLogExportService).to receive(:new).with(storage_service)
+ expect(export_service).to receive(:export_csv_case_logs)
+
+ task.invoke("CSV", "false")
+ end
+ end
end
diff --git a/spec/services/exports/case_log_export_service_spec.rb b/spec/services/exports/case_log_export_service_spec.rb
index 1efe5e28a..d3cc2c2f8 100644
--- a/spec/services/exports/case_log_export_service_spec.rb
+++ b/spec/services/exports/case_log_export_service_spec.rb
@@ -1,15 +1,18 @@
require "rails_helper"
RSpec.describe Exports::CaseLogExportService do
+ subject(:export_service) { described_class.new(storage_service) }
+
let(:storage_service) { instance_double(StorageService) }
- let(:export_file) { File.open("spec/fixtures/exports/case_logs.xml", "r:UTF-8") }
+ let(:xml_export_file) { File.open("spec/fixtures/exports/case_logs.xml", "r:UTF-8") }
let(:local_manifest_file) { File.open("spec/fixtures/exports/manifest.xml", "r:UTF-8") }
let(:expected_master_manifest_filename) { "Manifest_2022_05_01_0001.csv" }
let(:expected_master_manifest_rerun) { "Manifest_2022_05_01_0002.csv" }
let(:expected_zip_filename) { "core_2021_2022_jan_mar_f0001_inc0001.zip" }
let(:expected_manifest_filename) { "manifest.xml" }
+ let(:start_time) { Time.zone.local(2022, 5, 1) }
def replace_entity_ids(case_log, export_template)
export_template.sub!(/\{id\}/, (case_log["id"] + Exports::CaseLogExportService::LOG_ID_OFFSET).to_s)
@@ -22,20 +25,16 @@ RSpec.describe Exports::CaseLogExportService do
export_template.sub!(/\{recno\}/, record_number.to_s)
end
- context "when exporting daily case logs" do
- subject(:export_service) { described_class.new(storage_service) }
-
- let(:start_time) { Time.zone.local(2022, 5, 1) }
-
- before do
- Timecop.freeze(start_time)
- allow(storage_service).to receive(:write_file)
- end
+ before do
+ Timecop.freeze(start_time)
+ allow(storage_service).to receive(:write_file)
+ end
+ context "when exporting daily case logs in XML" do
context "and no case logs is available for export" do
it "generates a master manifest with the correct name" do
expect(storage_service).to receive(:write_file).with(expected_master_manifest_filename, any_args)
- export_service.export_case_logs
+ export_service.export_xml_case_logs
end
it "generates a master manifest with CSV headers but no data" do
@@ -43,7 +42,7 @@ RSpec.describe Exports::CaseLogExportService do
expected_content = "zip-name,date-time zipped folder generated,zip-file-uri\n"
allow(storage_service).to receive(:write_file).with(expected_master_manifest_filename, any_args) { |_, arg2| actual_content = arg2&.string }
- export_service.export_case_logs
+ export_service.export_xml_case_logs
expect(actual_content).to eq(expected_content)
end
end
@@ -54,7 +53,7 @@ RSpec.describe Exports::CaseLogExportService do
it "generates a ZIP export file with the expected filename" do
expect(storage_service).to receive(:write_file).with(expected_zip_filename, any_args)
- export_service.export_case_logs
+ export_service.export_xml_case_logs
end
it "generates an XML manifest file with the expected filename within the ZIP file" do
@@ -63,7 +62,7 @@ RSpec.describe Exports::CaseLogExportService do
expect(entry).not_to be_nil
expect(entry.name).to eq(expected_manifest_filename)
end
- export_service.export_case_logs
+ export_service.export_xml_case_logs
end
it "generates an XML export file with the expected filename within the ZIP file" do
@@ -72,7 +71,7 @@ RSpec.describe Exports::CaseLogExportService do
expect(entry).not_to be_nil
expect(entry.name).to eq(expected_data_filename)
end
- export_service.export_case_logs
+ export_service.export_xml_case_logs
end
it "generates an XML manifest file with the expected content within the ZIP file" do
@@ -83,18 +82,18 @@ RSpec.describe Exports::CaseLogExportService do
expect(entry.get_input_stream.read).to eq(expected_content)
end
- export_service.export_case_logs
+ export_service.export_xml_case_logs
end
it "generates an XML export file with the expected content within the ZIP file" do
- expected_content = replace_entity_ids(case_log, export_file.read)
+ expected_content = replace_entity_ids(case_log, xml_export_file.read)
expect(storage_service).to receive(:write_file).with(expected_zip_filename, any_args) do |_, content|
entry = Zip::File.open_buffer(content).find_entry(expected_data_filename)
expect(entry).not_to be_nil
expect(entry.get_input_stream.read).to eq(expected_content)
end
- export_service.export_case_logs
+ export_service.export_xml_case_logs
end
end
@@ -111,7 +110,7 @@ RSpec.describe Exports::CaseLogExportService do
expect(storage_service).to receive(:write_file).with(expected_zip_filename, any_args)
expect(storage_service).to receive(:write_file).with(expected_zip_filename2, any_args)
- export_service.export_case_logs
+ export_service.export_xml_case_logs
end
end
end
@@ -130,11 +129,11 @@ RSpec.describe Exports::CaseLogExportService do
expect(entry.get_input_stream.read).to eq(expected_content)
end
- export_service.export_case_logs
+ export_service.export_xml_case_logs
end
it "creates a logs export record in a database with correct time" do
- expect { export_service.export_case_logs }
+ expect { export_service.export_xml_case_logs }
.to change(LogsExport, :count).by(1)
expect(LogsExport.last.started_at).to eq(start_time)
end
@@ -146,7 +145,7 @@ RSpec.describe Exports::CaseLogExportService do
expect(csv&.count).to be > 0
end
- export_service.export_case_logs
+ export_service.export_xml_case_logs
end
end
@@ -161,7 +160,7 @@ RSpec.describe Exports::CaseLogExportService do
csv = CSV.parse(csv_content, headers: true)
expect(csv&.count).to eq(0)
end
- export_service.export_case_logs
+ export_service.export_xml_case_logs
end
end
end
@@ -169,22 +168,22 @@ RSpec.describe Exports::CaseLogExportService do
context "and a previous export has run the same day having case logs" do
before do
FactoryBot.create(:case_log, startdate: Time.zone.local(2022, 2, 1))
- export_service.export_case_logs
+ export_service.export_xml_case_logs
end
it "increments the master manifest number by 1" do
expect(storage_service).to receive(:write_file).with(expected_master_manifest_rerun, any_args)
- export_service.export_case_logs
+ export_service.export_xml_case_logs
end
context "and we trigger another full update" do
it "increments the base number" do
- export_service.export_case_logs(full_update: true)
+ export_service.export_xml_case_logs(full_update: true)
expect(LogsExport.last.base_number).to eq(2)
end
it "resets the increment number" do
- export_service.export_case_logs(full_update: true)
+ export_service.export_xml_case_logs(full_update: true)
expect(LogsExport.last.increment_number).to eq(1)
end
@@ -193,21 +192,21 @@ RSpec.describe Exports::CaseLogExportService do
csv = CSV.parse(csv_content, headers: true)
expect(csv&.count).to be > 0
end
- export_service.export_case_logs(full_update: true)
+ export_service.export_xml_case_logs(full_update: true)
end
it "generates a ZIP export file with the expected filename" do
expect(storage_service).to receive(:write_file).with("core_2021_2022_jan_mar_f0002_inc0001.zip", any_args)
- export_service.export_case_logs(full_update: true)
+ export_service.export_xml_case_logs(full_update: true)
end
end
end
context "and a previous export has run having no case logs" do
- before { export_service.export_case_logs }
+ before { export_service.export_xml_case_logs }
it "doesn't increment the manifest number by 1" do
- export_service.export_case_logs
+ export_service.export_xml_case_logs
expect(LogsExport.last.increment_number).to eq(1)
end
@@ -217,10 +216,26 @@ RSpec.describe Exports::CaseLogExportService do
before { allow(storage_service).to receive(:write_file).and_raise(StandardError.new("This is an exception")) }
it "does not save a record in the database" do
- expect { export_service.export_case_logs }
+ expect { export_service.export_xml_case_logs }
.to raise_error(StandardError)
.and(change(LogsExport, :count).by(0))
end
end
end
+
+ context "when export case logs in CSV" do
+ let(:csv_export_file) { File.open("spec/fixtures/exports/case_logs.csv", "r:UTF-8") }
+ let(:expected_csv_filename) { "export_2022_05_01.csv" }
+
+ let(:case_log) { FactoryBot.create(:case_log, :completed) }
+
+ it "generates an CSV export file with the expected content" do
+ expected_content = replace_entity_ids(case_log, csv_export_file.read)
+ expect(storage_service).to receive(:write_file).with(expected_csv_filename, any_args) do |_, content|
+ expect(content).not_to be_nil
+ expect(content.read).to eq(expected_content)
+ end
+ export_service.export_csv_case_logs
+ end
+ end
end