# frozen_string_literal: true

require "rails_helper"

# Specs for the 2023 sales bulk-upload CSV parser. Fixture CSVs are generated
# via BulkUpload::SalesLogToCsv from a factory-built sales log, covering:
# templates with/without descriptive header rows, shuffled column order,
# a UTF-8 BOM prefix, an invalid byte sequence, and column-letter lookup.
RSpec.describe BulkUpload::Sales::Year2023::CsvParser do
  subject(:service) { described_class.new(path:) }

  let(:file) { Tempfile.new }
  let(:path) { file.path }
  let(:log) { build(:sales_log, :completed, :with_uprn) }

  # Writes the six descriptive header rows that precede the field-number row
  # in the 2023 sales bulk-upload template.
  def write_template_headers(file)
    file.write("Question\n")
    file.write("Additional info\n")
    file.write("Values\n")
    file.write("Can be empty?\n")
    file.write("Type of letting the question applies to\n")
    file.write("Duplicate check field?\n")
  end

  context "when parsing csv with headers" do
    before do
      write_template_headers(file)
      file.write(BulkUpload::SalesLogToCsv.new(log:).default_2023_field_numbers_row)
      file.write(BulkUpload::SalesLogToCsv.new(log:).to_2023_csv_row)
      file.rewind
    end

    it "returns correct offsets" do
      # 6 descriptive header rows + 1 field-number row precede the data.
      expect(service.row_offset).to eq(7)
      expect(service.col_offset).to eq(1)
    end

    it "parses csv correctly" do
      expect(service.row_parsers[0].field_19).to eql(log.uprn)
    end
  end

  context "when parsing csv with headers in arbitrary order" do
    # Shared seed so the field-number row and the data row are shuffled
    # into the same (random) column order.
    let(:seed) { rand }

    before do
      write_template_headers(file)
      file.write(BulkUpload::SalesLogToCsv.new(log:).default_2023_field_numbers_row(seed:))
      file.write(BulkUpload::SalesLogToCsv.new(log:).to_2023_csv_row(seed:))
      file.rewind
    end

    it "returns correct offsets" do
      expect(service.row_offset).to eq(7)
      expect(service.col_offset).to eq(1)
    end

    it "parses csv correctly" do
      expect(service.row_parsers[0].field_19).to eql(log.uprn)
    end
  end

  context "when parsing csv without headers" do
    before do
      file.write(BulkUpload::SalesLogToCsv.new(log:, col_offset: 0).to_2023_csv_row)
      file.rewind
    end

    it "returns correct offsets" do
      expect(service.row_offset).to eq(0)
      expect(service.col_offset).to eq(0)
    end

    it "parses csv correctly" do
      expect(service.row_parsers[0].field_19).to eql(log.uprn)
    end
  end

  context "when parsing with BOM aka byte order mark" do
    let(:bom) { "\uFEFF" }

    before do
      file.write(bom)
      file.write(BulkUpload::SalesLogToCsv.new(log:, col_offset: 0).to_2023_csv_row)
      file.close
    end

    it "parses csv correctly" do
      expect(service.row_parsers[0].field_19).to eql(log.uprn)
    end
  end

  context "when an invalid byte sequence" do
    let(:invalid_sequence) { "\x81" }

    before do
      file.write(invalid_sequence)
      file.write(BulkUpload::SalesLogToCsv.new(log:, col_offset: 0).to_2023_csv_row)
      file.close
    end

    it "parses csv correctly" do
      expect(service.row_parsers[0].field_19).to eql(log.uprn)
    end
  end

  describe "#column_for_field", aggregate_failures: true do
    context "when headers present" do
      before do
        write_template_headers(file)
        file.write(BulkUpload::SalesLogToCsv.new(log:).default_2023_field_numbers_row)
        file.write(BulkUpload::SalesLogToCsv.new(log:).to_2023_csv_row)
        file.rewind
      end

      it "returns correct column" do
        # One-column offset from the template's leading column shifts the
        # spreadsheet-style letters by one vs the headerless case below.
        expect(service.column_for_field("field_1")).to eql("CO")
        expect(service.column_for_field("field_99")).to eql("EK")
      end
    end

    context "when no headers" do
      before do
        file.write(BulkUpload::SalesLogToCsv.new(log:, col_offset: 0).to_2023_csv_row)
        file.rewind
      end

      it "returns correct column" do
        expect(service.column_for_field("field_1")).to eql("CN")
        expect(service.column_for_field("field_99")).to eql("EJ")
      end
    end
  end
end
|