require "rails_helper"

# Specs for the 2026 lettings bulk-upload CSV parser: header/offset detection,
# row parsing, BOM and invalid-byte tolerance, and field-to-column mapping.
RSpec.describe BulkUpload::Lettings::Year2026::CsvParser do
  subject(:service) { described_class.new(path:) }

  let(:file) { Tempfile.new }
  let(:path) { file.path }
  let(:log) { build(:lettings_log, :completed) }

  context "when parsing csv with headers" do
    before do
      # Six informational header rows precede the field-number row and data row.
      file.write("Question\n")
      file.write("Additional info\n")
      file.write("Values\n")
      file.write("Can be empty?\n")
      file.write("Type of letting the question applies to\n")
      file.write("Duplicate check field?\n")
      file.write(BulkUpload::LettingsLogToCsv.new(log:).default_field_numbers_row_for_year(2026))
      file.write(BulkUpload::LettingsLogToCsv.new(log:).to_year_csv_row(2026))
      file.rewind
    end

    it "returns correct offsets" do
      expect(service.row_offset).to eq(7)
      expect(service.col_offset).to eq(1)
    end

    it "parses csv correctly" do
      expect(service.row_parsers[0].field_13).to eql(log.tenancycode)
    end
  end

  context "when some csv headers are empty (and we don't care about them)" do
    before do
      file.write("Question\n")
      file.write("Additional info\n")
      file.write("Values\n")
      file.write("\n")
      file.write("Type of letting the question applies to\n")
      file.write("Duplicate check field?\n")
      file.write(BulkUpload::LettingsLogToCsv.new(log:).default_field_numbers_row_for_year(2026))
      file.write(BulkUpload::LettingsLogToCsv.new(log:).to_year_csv_row(2026))
      file.rewind
    end

    it "returns correct offsets" do
      expect(service.row_offset).to eq(7)
      expect(service.col_offset).to eq(1)
    end

    it "parses csv correctly" do
      expect(service.row_parsers[0].field_13).to eql(log.tenancycode)
    end
  end

  context "when parsing csv with headers with extra rows" do
    before do
      # An extra "Section" row on top, and a trailing blank row after the data.
      file.write("Section\n")
      file.write("Question\n")
      file.write("Additional info\n")
      file.write("Values\n")
      file.write("Can be empty?\n")
      file.write("Type of letting the question applies to\n")
      file.write("Duplicate check field?\n")
      file.write(BulkUpload::LettingsLogToCsv.new(log:).default_field_numbers_row_for_year(2026))
      file.write(BulkUpload::LettingsLogToCsv.new(log:).to_year_csv_row(2026))
      file.write("\n")
      file.rewind
    end

    it "returns correct offsets" do
      expect(service.row_offset).to eq(8)
      expect(service.col_offset).to eq(1)
    end

    it "parses csv correctly" do
      expect(service.row_parsers[0].field_13).to eql(log.tenancycode)
    end

    it "does not parse the last empty row" do
      expect(service.row_parsers.count).to eq(1)
    end
  end

  context "when parsing csv with headers in arbitrary order" do
    # A random seed shuffles the column order; parsing must still succeed.
    let(:seed) { rand }

    before do
      file.write("Question\n")
      file.write("Additional info\n")
      file.write("Values\n")
      file.write("Can be empty?\n")
      file.write("Type of letting the question applies to\n")
      file.write("Duplicate check field?\n")
      file.write(BulkUpload::LettingsLogToCsv.new(log:).default_field_numbers_row_for_year(2026, seed:))
      file.write(BulkUpload::LettingsLogToCsv.new(log:).to_year_csv_row(2026, seed:))
      file.rewind
    end

    it "returns correct offsets" do
      expect(service.row_offset).to eq(7)
      expect(service.col_offset).to eq(1)
    end

    it "parses csv correctly" do
      expect(service.row_parsers[0].field_13).to eql(log.tenancycode)
    end
  end

  context "when parsing csv with extra invalid headers" do
    let(:seed) { rand }
    let(:log_to_csv) { BulkUpload::LettingsLogToCsv.new(log:) }
    let(:field_numbers) { log_to_csv.default_2026_field_numbers + %w[invalid_field_number] }
    let(:field_values) { log_to_csv.to_2026_row + %w[value_for_invalid_field_number] }

    before do
      file.write("Question\n")
      file.write("Additional info\n")
      file.write("Values\n")
      file.write("Can be empty?\n")
      file.write("Type of letting the question applies to\n")
      file.write("Duplicate check field?\n")
      file.write(log_to_csv.custom_field_numbers_row(seed:, field_numbers:))
      file.write(log_to_csv.to_custom_csv_row(seed:, field_values:))
      file.rewind
    end

    it "parses csv correctly" do
      expect(service.row_parsers[0].field_13).to eql(log.tenancycode)
    end

    it "counts the number of valid field numbers correctly" do
      # Unknown field numbers must not throw off the expected-field count.
      expect(service).to be_correct_field_count
    end
  end

  context "when parsing csv without headers" do
    before do
      file.write(BulkUpload::LettingsLogToCsv.new(log:, col_offset: 0).to_year_csv_row(2026))
      file.rewind
    end

    it "returns correct offsets" do
      expect(service.row_offset).to eq(0)
      expect(service.col_offset).to eq(0)
    end

    it "parses csv correctly" do
      expect(service.row_parsers[0].field_13).to eql(log.tenancycode)
    end
  end

  context "when parsing with BOM aka byte order mark" do
    let(:bom) { "\uFEFF" }

    before do
      file.write(bom)
      file.write(BulkUpload::LettingsLogToCsv.new(log:, col_offset: 0).to_year_csv_row(2026))
      file.rewind
    end

    it "parses csv correctly" do
      expect(service.row_parsers[0].field_13).to eql(log.tenancycode)
    end
  end

  context "when an invalid byte sequence" do
    # \x81 is not valid UTF-8; the parser is expected to scrub it.
    let(:invalid_sequence) { "\x81" }

    before do
      file.write(invalid_sequence)
      file.write(BulkUpload::LettingsLogToCsv.new(log:, col_offset: 0).to_year_csv_row(2026))
      file.rewind
    end

    it "parses csv correctly" do
      expect(service.row_parsers[0].field_13).to eql(log.tenancycode)
    end
  end

  context "when parsing csv with carriage returns" do
    before do
      # Mix of \r\n and bare \r line endings, as produced by some spreadsheet tools.
      file.write("Question\r\n")
      file.write("Additional info\r")
      file.write("Values\r\n")
      file.write("Can be empty?\r")
      file.write("Type of letting the question applies to\r\n")
      file.write("Duplicate check field?\r")
      file.write(BulkUpload::LettingsLogToCsv.new(log:).default_field_numbers_row_for_year(2026))
      file.write(BulkUpload::LettingsLogToCsv.new(log:).to_year_csv_row(2026))
      file.rewind
    end

    it "parses csv correctly" do
      expect(service.row_parsers[0].field_13).to eql(log.tenancycode)
    end
  end

  describe "#column_for_field", aggregate_failures: true do
    context "when with headers using default ordering" do
      before do
        file.write("Question\n")
        file.write("Additional info\n")
        file.write("Values\n")
        file.write("Can be empty?\n")
        file.write("Type of letting the question applies to\n")
        file.write("Duplicate check field?\n")
        file.write(BulkUpload::LettingsLogToCsv.new(log:).default_field_numbers_row_for_year(2026))
        file.write(BulkUpload::LettingsLogToCsv.new(log:).to_year_csv_row(2026))
        file.rewind
      end

      it "returns correct column" do
        expect(service.column_for_field("field_5")).to eql("F")
        expect(service.column_for_field("field_22")).to eql("W")
      end
    end

    context "when without headers using default ordering" do
      before do
        file.write(BulkUpload::LettingsLogToCsv.new(log:, col_offset: 0).to_year_csv_row(2026))
        file.rewind
      end

      it "returns correct column" do
        # Without the leading header column, every column shifts left by one.
        expect(service.column_for_field("field_5")).to eql("E")
        expect(service.column_for_field("field_22")).to eql("V")
      end
    end

    context "when with headers using custom ordering" do
      # Fixed seed so the shuffled column positions are deterministic.
      let(:seed) { 123 }

      before do
        file.write("Question\n")
        file.write("Additional info\n")
        file.write("Values\n")
        file.write("Can be empty?\n")
        file.write("Type of letting the question applies to\n")
        file.write("Duplicate check field?\n")
        file.write(BulkUpload::LettingsLogToCsv.new(log:).default_field_numbers_row_for_year(2026, seed:))
        file.write(BulkUpload::LettingsLogToCsv.new(log:).to_year_csv_row(2026, seed:))
        file.rewind
      end

      it "returns correct column" do
        expect(service.column_for_field("field_5")).to eql("B")
        expect(service.column_for_field("field_22")).to eql("AS")
        expect(service.column_for_field("field_26")).to eql("DG")
        expect(service.column_for_field("field_25")).to eql("I")
      end
    end
  end
end
require "rails_helper"

# Specs for the 2026 sales bulk-upload CSV parser: header/offset detection,
# row parsing, BOM and invalid-byte tolerance, and field-to-column mapping.
#
# NOTE(review): the informational header rows below say "Type of letting the
# question applies to" even though this is the sales spec — copied from the
# lettings template. Kept byte-identical as fixture data; confirm the parser
# ignores header wording.
RSpec.describe BulkUpload::Sales::Year2026::CsvParser do
  subject(:service) { described_class.new(path:) }

  let(:file) { Tempfile.new }
  let(:path) { file.path }
  let(:log) { build(:sales_log, :completed, :with_uprn) }

  context "when parsing csv with headers" do
    before do
      file.write("Question\n")
      file.write("Additional info\n")
      file.write("Values\n")
      file.write("Can be empty?\n")
      file.write("Type of letting the question applies to\n")
      file.write("Duplicate check field?\n")
      # Fixed: previously generated with 2025, which exercises the wrong
      # year's column layout against the Year2026 parser under test.
      file.write(BulkUpload::SalesLogToCsv.new(log:).default_field_numbers_row_for_year(2026))
      file.write(BulkUpload::SalesLogToCsv.new(log:).to_year_csv_row(2026))
      file.write("\n")
      file.rewind
    end

    it "returns correct offsets" do
      expect(service.row_offset).to eq(7)
      expect(service.col_offset).to eq(1)
    end

    it "parses csv correctly" do
      expect(service.row_parsers[0].field_16).to eql(log.uprn)
    end

    it "counts the number of valid field numbers correctly" do
      expect(service).to be_correct_field_count
    end

    it "does not parse the last empty row" do
      expect(service.row_parsers.count).to eq(1)
    end
  end

  context "when some csv headers are empty (and we don't care about them)" do
    before do
      file.write("Question\n")
      file.write("Additional info\n")
      file.write("Values\n")
      file.write("\n")
      file.write("Type of letting the question applies to\n")
      file.write("Duplicate check field?\n")
      file.write(BulkUpload::SalesLogToCsv.new(log:).default_field_numbers_row_for_year(2026))
      file.write(BulkUpload::SalesLogToCsv.new(log:).to_year_csv_row(2026))
      file.write("\n")
      file.rewind
    end

    it "returns correct offsets" do
      expect(service.row_offset).to eq(7)
      expect(service.col_offset).to eq(1)
    end

    it "parses csv correctly" do
      expect(service.row_parsers[0].field_16).to eql(log.uprn)
    end

    it "counts the number of valid field numbers correctly" do
      expect(service).to be_correct_field_count
    end

    it "does not parse the last empty row" do
      expect(service.row_parsers.count).to eq(1)
    end
  end

  context "when parsing csv with headers in arbitrary order" do
    # A random seed shuffles the column order; parsing must still succeed.
    let(:seed) { rand }

    before do
      file.write("Question\n")
      file.write("Additional info\n")
      file.write("Values\n")
      file.write("Can be empty?\n")
      file.write("Type of letting the question applies to\n")
      file.write("Duplicate check field?\n")
      # Fixed: previously generated with 2025 (copy-paste remnant); this spec
      # targets the 2026 parser, matching every other context in this file.
      file.write(BulkUpload::SalesLogToCsv.new(log:).default_field_numbers_row_for_year(2026, seed:))
      file.write(BulkUpload::SalesLogToCsv.new(log:).to_year_csv_row(2026, seed:))
      file.rewind
    end

    it "returns correct offsets" do
      expect(service.row_offset).to eq(7)
      expect(service.col_offset).to eq(1)
    end

    it "parses csv correctly" do
      expect(service.row_parsers[0].field_16).to eql(log.uprn)
    end
  end

  context "when parsing csv without headers" do
    # (Removed duplicate let(:file)/let(:path)/let(:log) declarations that
    # shadowed identical outer lets.)
    before do
      file.write(BulkUpload::SalesLogToCsv.new(log:, col_offset: 0).to_year_csv_row(2026))
      file.rewind
    end

    it "returns correct offsets" do
      expect(service.row_offset).to eq(0)
      expect(service.col_offset).to eq(0)
    end

    it "parses csv correctly" do
      expect(service.row_parsers[0].field_16).to eql(log.uprn)
    end
  end

  context "when parsing with BOM aka byte order mark" do
    let(:bom) { "\uFEFF" }

    before do
      file.write(bom)
      file.write(BulkUpload::SalesLogToCsv.new(log:, col_offset: 0).to_year_csv_row(2026))
      # close flushes buffered writes; the service reads the file from disk by path
      file.close
    end

    it "parses csv correctly" do
      expect(service.row_parsers[0].field_16).to eql(log.uprn)
    end
  end

  context "when an invalid byte sequence" do
    # \x81 is not valid UTF-8; the parser is expected to scrub it.
    let(:invalid_sequence) { "\x81" }

    before do
      file.write(invalid_sequence)
      file.write(BulkUpload::SalesLogToCsv.new(log:, col_offset: 0).to_year_csv_row(2026))
      file.close
    end

    it "parses csv correctly" do
      expect(service.row_parsers[0].field_16).to eql(log.uprn)
    end
  end

  describe "#column_for_field", aggregate_failures: true do
    context "when headers present" do
      before do
        file.write("Question\n")
        file.write("Additional info\n")
        file.write("Values\n")
        file.write("Can be empty?\n")
        file.write("Type of letting the question applies to\n")
        file.write("Duplicate check field?\n")
        file.write(BulkUpload::SalesLogToCsv.new(log:).default_field_numbers_row_for_year(2026))
        file.write(BulkUpload::SalesLogToCsv.new(log:).to_year_csv_row(2026))
        file.rewind
      end

      it "returns correct column" do
        expect(service.column_for_field("field_1")).to eql("B")
        expect(service.column_for_field("field_99")).to eql("CV")
      end
    end
  end

  context "when parsing csv with carriage returns" do
    before do
      # Mix of \r\n and bare \r line endings, as produced by some spreadsheet tools.
      file.write("Question\r\n")
      file.write("Additional info\r")
      file.write("Values\r\n")
      file.write("Can be empty?\r")
      file.write("Type of letting the question applies to\r\n")
      file.write("Duplicate check field?\r")
      file.write(BulkUpload::SalesLogToCsv.new(log:).default_field_numbers_row_for_year(2026))
      file.write(BulkUpload::SalesLogToCsv.new(log:).to_year_csv_row(2026))
      file.rewind
    end

    it "parses csv correctly" do
      expect(service.row_parsers[0].field_16).to eql(log.uprn)
    end
  end
end