CLDC-2316 Bulk upload sales 2023 (#1603)

# Context

- https://digital.dclg.gov.uk/jira/browse/CLDC-2316
- Implement bulk upload for sales logs for the new 2023 collection year
- This is a first-pass implementation; it will probably have some bugs, which we can address over time

# Changes

- Add a CSV parser for sales 2023 to handle the new CSV structure
- Tweak collection window validation so the error is now contextual to the year selected for the upload
- Handle arbitrary ordering of CSV columns (see the sketch after this list)
- Fix the ordering of errors in the report, which are now ordered by cell
- Add an `Upload your file again` link, styled as a button, on the error report to match the lettings experience
- Update tooling to convert logs to 2023 CSV rows, with support for random column ordering
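
For the arbitrary column ordering, the 2023 sales CSV parser reads the `Bulk upload field number` header row and builds its field mapping from that row rather than assuming the template's column order. A simplified sketch of the idea (the file name is hypothetical; offsets and the headerless case are handled by the real `CsvParser` further down):

```ruby
require "csv"

rows = CSV.parse(File.read("sales-2023.csv"))

# Find the row that carries the bulk upload field numbers.
header_index = rows.find_index { |row| row[0].to_s.match?(/field number/i) }

# Map each column to "field_N" based on that header row (skipping column A).
field_numbers = rows[header_index][1..].map do |number|
  number.to_s.match?(/^[0-9]+$/) ? "field_#{number}" : "field_blank"
end

# Each data row becomes a field_N => value hash, whatever order the columns are in.
rows[(header_index + 1)..].each do |row|
  attributes = field_numbers.zip(row[1..]).to_h
  BulkUpload::Sales::Year2023::RowParser.new(attributes)
end
```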

# Known issues

- There seem to be some issues with how the UPRN is handled when it cannot be validated.
- For the above, I think there is a dependency on https://github.com/communitiesuk/submit-social-housing-lettings-and-sales-data/pull/1570, which should clear any errored fields so users can continue to create logs and fix them within the service
Committed by Phil Lee (commit d939e88435)
Changed files (number of changed lines in parentheses):

1. app/controllers/lettings_logs_controller.rb (2)
2. app/models/bulk_upload.rb (4)
3. app/services/bulk_upload/lettings/year2022/row_parser.rb (6)
4. app/services/bulk_upload/lettings/year2023/row_parser.rb (6)
5. app/services/bulk_upload/sales/year2022/row_parser.rb (8)
6. app/services/bulk_upload/sales/year2023/csv_parser.rb (91)
7. app/services/bulk_upload/sales/year2023/row_parser.rb (1159)
8. app/views/bulk_upload_sales_results/show.html.erb (4)
9. config/locales/en.yml (2)
10. spec/controllers/lettings_logs_controller_spec.rb (21)
11. spec/factories/sales_log.rb (3)
12. spec/requests/lettings_logs_controller_spec.rb (6)
13. spec/services/bulk_upload/sales/year2023/csv_parser_spec.rb (148)
14. spec/services/bulk_upload/sales/year2023/row_parser_spec.rb (704)
15. spec/support/bulk_upload/sales_log_to_csv.rb (41)

app/controllers/lettings_logs_controller.rb (2)

@@ -118,7 +118,7 @@ private
end
def redirect_if_bulk_upload_resolved
- if @bulk_upload && @bulk_upload.lettings_logs.in_progress.count.zero?
+ if @bulk_upload && @bulk_upload.lettings? && @bulk_upload.lettings_logs.in_progress.count.zero?
redirect_to resume_bulk_upload_lettings_result_path(@bulk_upload)
end
end

app/models/bulk_upload.rb (4)

@@ -14,6 +14,10 @@ class BulkUpload < ApplicationRecord
"#{year}/#{year - 2000 + 1}"
end
def end_year
year + 1
end
def logs
if lettings?
lettings_logs
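
With the existing year_combo (shown in the context above) and the new end_year helper, the values for a 2023 upload work out as follows; a minimal sketch, assuming bulk_upload.year is 2023:

```ruby
bulk_upload = BulkUpload.new(year: 2023) # assumes year is a plain attribute

bulk_upload.year_combo # => "2023/24", from "#{year}/#{year - 2000 + 1}"
bulk_upload.end_year   # => 2024, from year + 1
```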

app/services/bulk_upload/lettings/year2022/row_parser.rb (6)

@@ -691,9 +691,9 @@ private
return if start_date.blank? || bulk_upload.form.blank?
unless bulk_upload.form.valid_start_date_for_form?(start_date)
- errors.add(:field_96, I18n.t("validations.date.outside_collection_window"), category: :setup)
- errors.add(:field_97, I18n.t("validations.date.outside_collection_window"), category: :setup)
- errors.add(:field_98, I18n.t("validations.date.outside_collection_window"), category: :setup)
+ errors.add(:field_96, I18n.t("validations.date.outside_collection_window", year_combo: bulk_upload.year_combo, start_year: bulk_upload.year, end_year: bulk_upload.end_year), category: :setup)
+ errors.add(:field_97, I18n.t("validations.date.outside_collection_window", year_combo: bulk_upload.year_combo, start_year: bulk_upload.year, end_year: bulk_upload.end_year), category: :setup)
+ errors.add(:field_98, I18n.t("validations.date.outside_collection_window", year_combo: bulk_upload.year_combo, start_year: bulk_upload.year, end_year: bulk_upload.end_year), category: :setup)
end
end

app/services/bulk_upload/lettings/year2023/row_parser.rb (6)

@@ -635,9 +635,9 @@ private
return if start_date.blank? || bulk_upload.form.blank?
unless bulk_upload.form.valid_start_date_for_form?(start_date)
- errors.add(:field_7, I18n.t("validations.date.outside_collection_window"), category: :setup)
- errors.add(:field_8, I18n.t("validations.date.outside_collection_window"), category: :setup)
- errors.add(:field_9, I18n.t("validations.date.outside_collection_window"), category: :setup)
+ errors.add(:field_7, I18n.t("validations.date.outside_collection_window", year_combo: bulk_upload.year_combo, start_year: bulk_upload.year, end_year: bulk_upload.end_year), category: :setup)
+ errors.add(:field_8, I18n.t("validations.date.outside_collection_window", year_combo: bulk_upload.year_combo, start_year: bulk_upload.year, end_year: bulk_upload.end_year), category: :setup)
+ errors.add(:field_9, I18n.t("validations.date.outside_collection_window", year_combo: bulk_upload.year_combo, start_year: bulk_upload.year, end_year: bulk_upload.end_year), category: :setup)
end
end

app/services/bulk_upload/sales/year2022/row_parser.rb (8)

@@ -811,7 +811,7 @@ private
def soctenant
return unless field_39 && field_113
- if (field_39 == 1 || fields_39 == 2) && field_113 == 1
+ if (field_39 == 1 || field_39 == 2) && field_113 == 1
1
elsif field_113 == 1
2
@@ -941,9 +941,9 @@ private
return if saledate.blank? || bulk_upload.form.blank?
unless bulk_upload.form.valid_start_date_for_form?(saledate)
- errors.add(:field_2, I18n.t("validations.date.outside_collection_window"), category: :setup)
- errors.add(:field_3, I18n.t("validations.date.outside_collection_window"), category: :setup)
- errors.add(:field_4, I18n.t("validations.date.outside_collection_window"), category: :setup)
+ errors.add(:field_2, I18n.t("validations.date.outside_collection_window", year_combo: bulk_upload.year_combo, start_year: bulk_upload.year, end_year: bulk_upload.end_year), category: :setup)
+ errors.add(:field_3, I18n.t("validations.date.outside_collection_window", year_combo: bulk_upload.year_combo, start_year: bulk_upload.year, end_year: bulk_upload.end_year), category: :setup)
+ errors.add(:field_4, I18n.t("validations.date.outside_collection_window", year_combo: bulk_upload.year_combo, start_year: bulk_upload.year, end_year: bulk_upload.end_year), category: :setup)
end
end

app/services/bulk_upload/sales/year2023/csv_parser.rb (91, new file)

@@ -0,0 +1,91 @@
require "csv"
class BulkUpload::Sales::Year2023::CsvParser
MIN_COLUMNS = 135
MAX_COLUMNS = 142
attr_reader :path
def initialize(path:)
@path = path
end
def row_offset
if with_headers?
rows.find_index { |row| row[0].match(/field number/i) } + 1
else
0
end
end
def col_offset
with_headers? ? 1 : 0
end
def cols
@cols ||= ("A".."EK").to_a
end
def row_parsers
@row_parsers ||= body_rows.map do |row|
stripped_row = row[col_offset..]
hash = Hash[field_numbers.zip(stripped_row)]
BulkUpload::Sales::Year2023::RowParser.new(hash)
end
end
def body_rows
rows[row_offset..]
end
def rows
@rows ||= CSV.parse(normalised_string, row_sep:)
end
def column_for_field(field)
cols[field_numbers.find_index(field) + col_offset]
end
private
def default_field_numbers
[6, 3, 4, 5, nil, 28, 30, 38, 47, 51, 55, 59, 31, 39, 48, 52, 56, 60, 37, 46, 50, 54, 58, 35, 43, 49, 53, 57, 61, 32, 33, 78, 80, 79, 81, 83, 84, nil, 62, 66, 64, 65, 63, 67, 69, 70, 68, 76, 77, 16, 17, 18, 26, 24, 25, 27, 8, 91, 95, 96, 97, 92, 93, 94, 98, 100, 101, 103, 104, 106, 110, 111, 112, 113, 114, 9, 116, 117, 118, 120, 124, 125, 126, 10, 11, nil, 127, 129, 133, 134, 135, 1, 2, nil, 73, nil, 75, 107, 108, 121, 122, 130, 131, 82, 109, 123, 132, 115, 15, 86, 87, 29, 7, 12, 13, 14, 36, 44, 45, 88, 89, 102, 105, 119, 128, 19, 20, 21, 22, 23, 34, 40, 41, 42, 71, 72, 74, 85, 90, 99].map do |number|
if number.to_s.match?(/^[0-9]+$/)
"field_#{number}"
else
"field_blank"
end
end
end
def field_numbers
@field_numbers ||= if with_headers?
rows[row_offset - 1][col_offset..].map { |number| number.to_s.match?(/^[0-9]+$/) ? "field_#{number}" : "field_blank" }
else
default_field_numbers
end
end
def headers
@headers ||= ("field_1".."field_135").to_a
end
def with_headers?
rows.map { |r| r[0] }.any? { |cell| cell&.match?(/field number/i) }
end
def row_sep
"\n"
end
def normalised_string
return @normalised_string if @normalised_string
@normalised_string = File.read(path, encoding: "bom|utf-8")
@normalised_string.gsub!("\r\n", "\n")
@normalised_string.scrub!("")
@normalised_string
end
end
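
To show how the pieces above fit together, here is a usage sketch of the new parser. The file path is hypothetical; row_parsers, column_for_field and the offset helpers are the methods defined above, and the example values assume the default 2023 template ordering:

```ruby
parser = BulkUpload::Sales::Year2023::CsvParser.new(path: "sales-2023-upload.csv")

# Offsets depend on whether the template's header block is present.
parser.row_offset # => 7 with the standard headers, 0 for a headerless file
parser.col_offset # => 1 with the standard headers, 0 for a headerless file

# One RowParser per data row, keyed by field number regardless of column order.
row_parser = parser.row_parsers.first
row_parser.field_1 # owning organisation code for that row

# Map a field back to its spreadsheet column, e.g. when reporting errors by cell.
parser.column_for_field("field_1") # => "CO" with headers in the default order
```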

app/services/bulk_upload/sales/year2023/row_parser.rb (1159, new file)

File diff suppressed because it is too large.

app/views/bulk_upload_sales_results/show.html.erb (4)

@@ -13,8 +13,10 @@
<div class="govuk-grid-row">
<div class="govuk-grid-column-full">
- <% @bulk_upload.bulk_upload_errors.group_by(&:row).each do |_row, errors_for_row| %>
+ <% @bulk_upload.bulk_upload_errors.order_by_cell.group_by(&:row).each do |_row, errors_for_row| %>
<%= render BulkUploadErrorRowComponent.new(bulk_upload_errors: errors_for_row) %>
<% end %>
</div>
</div>
+ <%= govuk_button_link_to "Upload your file again", start_bulk_upload_sales_logs_path %>
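
The order_by_cell scope used here lives on the bulk upload error model and is not part of this diff. A plausible sketch only, assuming each error records its spreadsheet column alongside its row; the real implementation may differ:

```ruby
class BulkUploadError < ApplicationRecord
  # Hypothetical sketch: order cells column-first, using column length so that
  # e.g. "Z" sorts before "AA", then by row within a column.
  scope :order_by_cell, -> { order(Arel.sql("LENGTH(col), col, row")) }
end
```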

config/locales/en.yml (2)

@@ -165,7 +165,7 @@ en:
date:
invalid_date: "Enter a date in the correct format, for example 31 1 2022"
- outside_collection_window: Enter a date within the 22/23 collection year, which is between 1st April 2022 and 31st March 2023
+ outside_collection_window: Enter a date within the %{year_combo} collection year, which is between 1st April %{start_year} and 31st March %{end_year}
postcode: "Enter a postcode in the correct format, for example AA1 1AA"
location_admin_district: "Select a local authority"
email:
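
With the arguments passed from the row parsers above, the message now renders per upload year instead of being hard-coded to 22/23. A minimal sketch, assuming a 2023 sales bulk upload:

```ruby
I18n.t(
  "validations.date.outside_collection_window",
  year_combo: bulk_upload.year_combo, # "2023/24"
  start_year: bulk_upload.year,       # 2023
  end_year: bulk_upload.end_year,     # 2024
)
# => "Enter a date within the 2023/24 collection year, which is between 1st April 2023 and 31st March 2024"
```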

spec/controllers/lettings_logs_controller_spec.rb (21, new file)

@@ -0,0 +1,21 @@
require "rails_helper"
RSpec.describe LettingsLogsController do
before do
sign_in bulk_upload.user
end
describe "#index" do
context "when a sales bulk upload filter is applied" do
let(:bulk_upload) { create(:bulk_upload, :sales) }
it "does not redirect to resume path" do
session[:logs_filters] = { bulk_upload_id: [bulk_upload.id.to_s] }.to_json
get :index
expect(response).to be_successful
end
end
end
end

spec/factories/sales_log.rb (3)

@@ -126,5 +126,8 @@ FactoryBot.define do
mortgagelender { 5 }
extrabor { 1 }
end
trait :with_uprn do
uprn { rand(999_999_999_999).to_s }
end
end
end

spec/requests/lettings_logs_controller_spec.rb (6)

@@ -438,7 +438,7 @@ RSpec.describe LettingsLogsController, type: :request do
let(:organisation) { create(:organisation) }
let(:user) { create(:user, organisation:) }
- let(:bulk_upload) { create(:bulk_upload, user:) }
+ let(:bulk_upload) { create(:bulk_upload, :lettings, user:) }
let!(:included_log) { create(:lettings_log, :in_progress, bulk_upload:, owning_organisation: organisation) }
let!(:excluded_log) { create(:lettings_log, :in_progress, owning_organisation: organisation) }
@@ -492,7 +492,7 @@ RSpec.describe LettingsLogsController, type: :request do
let(:user) { create(:user, organisation:) }
let(:other_user) { create(:user, organisation:) }
- let(:bulk_upload) { create(:bulk_upload, user: other_user) }
+ let(:bulk_upload) { create(:bulk_upload, :lettings, user: other_user) }
let!(:excluded_log) { create(:lettings_log, bulk_upload:, owning_organisation: organisation) }
let!(:also_excluded_log) { create(:lettings_log, owning_organisation: organisation) }
@@ -509,7 +509,7 @@ RSpec.describe LettingsLogsController, type: :request do
let(:organisation) { create(:organisation) }
let(:user) { create(:user, organisation:) }
- let(:bulk_upload) { create(:bulk_upload, user:) }
+ let(:bulk_upload) { create(:bulk_upload, :lettings, user:) }
it "redirects to resume the bulk upload" do
get "/lettings-logs?bulk_upload_id[]=#{bulk_upload.id}"

spec/services/bulk_upload/sales/year2023/csv_parser_spec.rb (148, new file)

@@ -0,0 +1,148 @@
require "rails_helper"
RSpec.describe BulkUpload::Sales::Year2023::CsvParser do
subject(:service) { described_class.new(path:) }
let(:file) { Tempfile.new }
let(:path) { file.path }
let(:log) { build(:sales_log, :completed, :with_uprn) }
context "when parsing csv with headers" do
before do
file.write("Question\n")
file.write("Additional info\n")
file.write("Values\n")
file.write("Can be empty?\n")
file.write("Type of letting the question applies to\n")
file.write("Duplicate check field?\n")
file.write(BulkUpload::SalesLogToCsv.new(log:).default_2023_field_numbers_row)
file.write(BulkUpload::SalesLogToCsv.new(log:).to_2023_csv_row)
file.rewind
end
it "returns correct offsets" do
expect(service.row_offset).to eq(7)
expect(service.col_offset).to eq(1)
end
it "parses csv correctly" do
expect(service.row_parsers[0].field_19).to eql(log.uprn)
end
end
context "when parsing csv with headers in arbitrary order" do
let(:seed) { rand }
before do
file.write("Question\n")
file.write("Additional info\n")
file.write("Values\n")
file.write("Can be empty?\n")
file.write("Type of letting the question applies to\n")
file.write("Duplicate check field?\n")
file.write(BulkUpload::SalesLogToCsv.new(log:).default_2023_field_numbers_row(seed:))
file.write(BulkUpload::SalesLogToCsv.new(log:).to_2023_csv_row(seed:))
file.rewind
end
it "returns correct offsets" do
expect(service.row_offset).to eq(7)
expect(service.col_offset).to eq(1)
end
it "parses csv correctly" do
expect(service.row_parsers[0].field_19).to eql(log.uprn)
end
end
context "when parsing csv without headers" do
let(:file) { Tempfile.new }
let(:path) { file.path }
let(:log) { build(:sales_log, :completed, :with_uprn) }
before do
file.write(BulkUpload::SalesLogToCsv.new(log:, col_offset: 0).to_2023_csv_row)
file.rewind
end
it "returns correct offsets" do
expect(service.row_offset).to eq(0)
expect(service.col_offset).to eq(0)
end
it "parses csv correctly" do
expect(service.row_parsers[0].field_19).to eql(log.uprn)
end
end
context "when parsing with BOM aka byte order mark" do
let(:file) { Tempfile.new }
let(:path) { file.path }
let(:log) { build(:sales_log, :completed, :with_uprn) }
let(:bom) { "\uFEFF" }
before do
file.write(bom)
file.write(BulkUpload::SalesLogToCsv.new(log:, col_offset: 0).to_2023_csv_row)
file.close
end
it "parses csv correctly" do
expect(service.row_parsers[0].field_19).to eql(log.uprn)
end
end
context "when an invalid byte sequence" do
let(:file) { Tempfile.new }
let(:path) { file.path }
let(:log) { build(:sales_log, :completed, :with_uprn) }
let(:invalid_sequence) { "\x81" }
before do
file.write(invalid_sequence)
file.write(BulkUpload::SalesLogToCsv.new(log:, col_offset: 0).to_2023_csv_row)
file.close
end
it "parses csv correctly" do
expect(service.row_parsers[0].field_19).to eql(log.uprn)
end
end
describe "#column_for_field", aggregate_failures: true do
context "when headers present" do
before do
file.write("Question\n")
file.write("Additional info\n")
file.write("Values\n")
file.write("Can be empty?\n")
file.write("Type of letting the question applies to\n")
file.write("Duplicate check field?\n")
file.write(BulkUpload::SalesLogToCsv.new(log:).default_2023_field_numbers_row)
file.write(BulkUpload::SalesLogToCsv.new(log:).to_2023_csv_row)
file.rewind
end
it "returns correct column" do
expect(service.column_for_field("field_1")).to eql("CO")
expect(service.column_for_field("field_99")).to eql("EK")
end
end
context "when no headers" do
let(:file) { Tempfile.new }
let(:path) { file.path }
let(:log) { build(:sales_log, :completed, :with_uprn) }
before do
file.write(BulkUpload::SalesLogToCsv.new(log:, col_offset: 0).to_2023_csv_row)
file.rewind
end
it "returns correct column" do
expect(service.column_for_field("field_1")).to eql("CN")
expect(service.column_for_field("field_99")).to eql("EJ")
end
end
end
end

spec/services/bulk_upload/sales/year2023/row_parser_spec.rb (704, new file)

@@ -0,0 +1,704 @@
require "rails_helper"
RSpec.describe BulkUpload::Sales::Year2023::RowParser do
subject(:parser) { described_class.new(attributes) }
let(:now) { Time.zone.parse("01/03/2023") }
let(:attributes) { { bulk_upload: } }
let(:bulk_upload) { create(:bulk_upload, :sales, user:) }
let(:user) { create(:user, organisation: owning_org) }
let(:owning_org) { create(:organisation, :with_old_visible_id) }
let(:setup_section_params) do
{
bulk_upload:,
field_1: owning_org.old_visible_id, # organisation
field_2: user.email, # user
field_3: now.day.to_s, # sale day
field_4: now.month.to_s, # sale month
field_5: now.strftime("%g"), # sale year
field_6: "test id", # purchase id
field_7: "1", # ownershipsch
field_8: "2", # shared ownership sale type
field_13: "1", # will the buyers live in the property
field_14: "2", # joint purchase
}
end
let(:valid_attributes) do
{
bulk_upload:,
field_1: owning_org.old_visible_id,
field_3: "22",
field_4: "2",
field_5: "23",
field_6: "test id",
field_7: "1",
field_8: "2",
field_13: "1",
field_14: "1",
field_15: "2",
field_16: "2",
field_17: "1",
field_18: "1",
field_19: "100023336956",
field_24: "CR0",
field_25: "4BB",
field_26: "E09000008",
field_27: "3",
field_28: "1",
field_29: "1",
field_30: "32",
field_31: "M",
field_32: "12",
field_33: "18",
field_35: "1",
field_36: "1",
field_37: "R",
field_38: "32",
field_39: "F",
field_43: "2",
field_44: "1",
field_45: "0",
field_62: "1",
field_63: "1",
field_64: "A1",
field_65: "1AA",
field_66: "E09000008",
field_69: "1",
field_70: "1",
field_73: "3",
field_75: "5",
field_76: "3",
field_77: "3",
field_78: "30000",
field_79: "1",
field_80: "15000",
field_81: "1",
field_82: "4",
field_83: "20000",
field_84: "3",
field_86: "5",
field_87: "1",
field_88: "10",
field_89: "10",
field_91: "2",
field_92: "30",
field_93: "3",
field_94: "22",
field_95: "23",
field_96: "3",
field_97: "22",
field_98: "3",
field_99: "1",
field_100: "1",
field_101: "1",
field_102: "1",
field_103: "250000",
field_104: "25",
field_105: "1",
field_106: "42500",
field_107: "1",
field_109: "20",
field_110: "3",
field_111: "20000",
field_113: "800",
field_114: "200",
}
end
around do |example|
FormHandler.instance.use_real_forms!
example.run
FormHandler.instance.use_fake_forms!
end
describe "#blank_row?" do
context "when a new object" do
it "returns true" do
expect(parser).to be_blank_row
end
end
context "when any field is populated" do
before do
parser.field_1 = "1"
end
it "returns false" do
expect(parser).not_to be_blank_row
end
end
end
describe "validations" do
before do
stub_request(:get, /api.postcodes.io/)
.to_return(status: 200, body: "{\"status\":200,\"result\":{\"admin_district\":\"Manchester\", \"codes\":{\"admin_district\": \"E08000003\"}}}", headers: {})
body = {
results: [
{
DPA: {
"POSTCODE": "EC1N 2TD",
"POST_TOWN": "Newcastle",
"ORGANISATION_NAME": "Some place",
},
},
],
}.to_json
stub_request(:get, "https://api.os.uk/search/places/v1/uprn?key=OS_DATA_KEY&uprn=100023336956")
.to_return(status: 200, body:, headers: {})
parser.valid?
end
describe "#valid?" do
context "when the row is blank" do
let(:attributes) { { bulk_upload: } }
it "returns true" do
expect(parser).to be_valid
end
end
context "when calling the method multiple times" do
let(:attributes) { { bulk_upload:, field_7: 2 } }
it "does not add keep adding errors to the pile" do
expect { parser.valid? }.not_to change(parser.errors, :count)
end
end
context "when valid row" do
let(:attributes) { valid_attributes }
it "returns true" do
expect(parser).to be_valid
end
it "instantiates a log with everything completed", aggregate_failures: true do
questions = parser.send(:questions).reject do |q|
parser.send(:log).optional_fields.include?(q.id) || q.completed?(parser.send(:log))
end
expect(questions.map(&:id).size).to eq(0)
expect(questions.map(&:id)).to eql([])
end
end
end
context "when setup section not complete and type is not given" do
let(:attributes) do
{
bulk_upload:,
field_6: "test id",
}
end
it "has errors on correct setup fields" do
errors = parser.errors.select { |e| e.options[:category] == :setup }.map(&:attribute).sort
expect(errors).to eql(%i[field_1 field_3 field_4 field_5 field_7])
end
end
context "when setup section not complete and type is shared ownership" do
let(:attributes) do
{
bulk_upload:,
field_6: "test id",
field_7: "1",
}
end
it "has errors on correct setup fields" do
errors = parser.errors.select { |e| e.options[:category] == :setup }.map(&:attribute).sort
expect(errors).to eql(%i[field_1 field_14 field_3 field_4 field_5 field_8])
end
end
context "when setup section not complete it's shared ownership joint purchase" do
let(:attributes) do
{
bulk_upload:,
field_6: "test id",
field_7: "1",
field_8: "2",
field_14: "1",
}
end
it "has errors on correct setup fields" do
errors = parser.errors.select { |e| e.options[:category] == :setup }.map(&:attribute).sort
expect(errors).to eql(%i[field_1 field_15 field_3 field_4 field_5])
end
end
context "when setup section not complete and type is discounted ownership" do
let(:attributes) do
{
bulk_upload:,
field_6: "test id",
field_7: "2",
}
end
it "has errors on correct setup fields" do
errors = parser.errors.select { |e| e.options[:category] == :setup }.map(&:attribute).sort
expect(errors).to eql(%i[field_1 field_14 field_3 field_4 field_5 field_9])
end
end
context "when setup section not complete it's discounted ownership joint purchase" do
let(:attributes) do
{
bulk_upload:,
field_28: "test id",
field_30: "2",
field_47: "8",
field_39: "1",
}
end
it "has errors on correct setup fields" do
errors = parser.errors.select { |e| e.options[:category] == :setup }.map(&:attribute).sort
expect(errors).to eql(%i[field_1 field_3 field_4 field_5 field_7])
end
end
context "when setup section not complete and type is outright sale" do
let(:attributes) do
{
bulk_upload:,
field_6: "test id",
field_7: "3",
}
end
it "has errors on correct setup fields" do
errors = parser.errors.select { |e| e.options[:category] == :setup }.map(&:attribute).sort
expect(errors).to eql(%i[field_1 field_10 field_12 field_13 field_3 field_4 field_5])
end
end
context "when setup section not complete outright sale buyer is not company" do
let(:attributes) do
{
bulk_upload:,
field_6: "test id",
field_7: "3",
field_10: "12",
field_12: "2",
}
end
it "has errors on correct setup fields" do
errors = parser.errors.select { |e| e.options[:category] == :setup }.map(&:attribute).sort
expect(errors).to eql(%i[field_1 field_13 field_14 field_3 field_4 field_5])
end
end
describe "#field_1" do # owning org
context "when no data given" do
let(:attributes) { setup_section_params.merge(field_1: nil) }
it "is not permitted as setup error" do
expect(parser.errors.where(:field_1, category: :setup).map(&:message)).to eql(["The owning organisation code is incorrect"])
end
it "blocks log creation" do
expect(parser).to be_block_log_creation
end
end
context "when cannot find owning org" do
let(:attributes) { { bulk_upload:, field_1: "donotexist" } }
it "is not permitted as a setup error" do
expect(parser.errors.where(:field_1, category: :setup).map(&:message)).to eql(["The owning organisation code is incorrect"])
end
it "blocks log creation" do
expect(parser).to be_block_log_creation
end
end
context "when not affiliated with owning org" do
let(:unaffiliated_org) { create(:organisation, :with_old_visible_id) }
let(:attributes) { { bulk_upload:, field_1: unaffiliated_org.old_visible_id } }
it "is not permitted as setup error" do
expect(parser.errors.where(:field_1, category: :setup).map(&:message)).to eql(["You do not have permission to add logs for this owning organisation"])
end
it "blocks log creation" do
expect(parser).to be_block_log_creation
end
end
end
describe "#field_2" do # username for created_by
context "when blank" do
let(:attributes) { setup_section_params.merge(bulk_upload:, field_2: nil) }
it "is permitted" do
expect(parser.errors[:field_2]).to be_blank
end
end
context "when user could not be found" do
let(:attributes) { { bulk_upload:, field_2: "idonotexist@example.com" } }
it "is not permitted" do
expect(parser.errors[:field_2]).to be_present
end
end
context "when an unaffiliated user" do
let(:other_user) { create(:user) }
let(:attributes) { { bulk_upload:, field_1: owning_org.old_visible_id, field_2: other_user.email } }
it "is not permitted as a setup error" do
expect(parser.errors.where(:field_2, category: :setup)).to be_present
end
it "blocks log creation" do
expect(parser).to be_block_log_creation
end
end
context "when an user part of owning org" do
let(:other_user) { create(:user, organisation: owning_org) }
let(:attributes) { { bulk_upload:, field_1: owning_org.old_visible_id, field_2: other_user.email } }
it "is permitted" do
expect(parser.errors[:field_2]).to be_blank
end
end
end
describe "fields 3, 4, 5 => saledate" do
context "when all of these fields are blank" do
let(:attributes) { setup_section_params.merge({ field_3: nil, field_4: nil, field_5: nil }) }
it "returns them as setup errors" do
expect(parser.errors.where(:field_3, category: :setup)).to be_present
expect(parser.errors.where(:field_4, category: :setup)).to be_present
expect(parser.errors.where(:field_5, category: :setup)).to be_present
end
end
context "when one of these fields is blank" do
let(:attributes) { setup_section_params.merge({ field_3: "1", field_4: "1", field_5: nil }) }
it "returns an error only on blank field as setup error" do
expect(parser.errors[:field_3]).to be_blank
expect(parser.errors[:field_4]).to be_blank
expect(parser.errors.where(:field_5, category: :setup)).to be_present
end
end
context "when field 5 is 4 digits instead of 2" do
let(:attributes) { setup_section_params.merge({ bulk_upload:, field_5: "2022" }) }
it "returns a setup error" do
expect(parser.errors.where(:field_5, category: :setup).map(&:message)).to include("Sale completion year must be 2 digits")
end
end
context "when invalid date given" do
let(:attributes) { setup_section_params.merge({ field_3: "a", field_4: "12", field_5: "2022" }) }
it "does not raise an error" do
expect { parser.valid? }.not_to raise_error
end
end
context "when inside of collection year" do
around do |example|
Timecop.freeze(Date.new(2023, 10, 1)) do
example.run
end
end
let(:attributes) { setup_section_params.merge({ field_3: "1", field_4: "10", field_5: "23" }) }
let(:bulk_upload) { create(:bulk_upload, :sales, user:, year: 2023) }
it "does not return errors" do
expect(parser.errors[:field_3]).not_to be_present
expect(parser.errors[:field_4]).not_to be_present
expect(parser.errors[:field_5]).not_to be_present
end
end
context "when outside of collection year" do
around do |example|
Timecop.freeze(Date.new(2022, 4, 2)) do
example.run
end
end
let(:attributes) { setup_section_params.merge({ field_3: "1", field_4: "1", field_5: "22" }) }
let(:bulk_upload) { create(:bulk_upload, :sales, user:, year: 2022) }
it "returns setup errors" do
expect(parser.errors.where(:field_3, category: :setup)).to be_present
expect(parser.errors.where(:field_4, category: :setup)).to be_present
expect(parser.errors.where(:field_5, category: :setup)).to be_present
end
end
end
describe "#field_19" do # UPRN
context "when UPRN known" do
let(:attributes) { setup_section_params.merge({ field_19: "100023336956" }) }
it "is valid" do
expect(parser.errors[:field_19]).to be_blank
end
end
context "when UPRN not known but address known" do
let(:attributes) { setup_section_params.merge({ field_19: nil, field_20: "some street", field_22: "some town", field_24: "EC1N", field_25: "2TD" }) }
it "is valid" do
expect(parser.errors[:field_19]).to be_blank
end
end
context "when neither UPRN or address known" do
let(:attributes) { setup_section_params.merge({ field_19: nil, field_20: nil, field_22: nil, field_24: nil, field_25: nil }) }
it "is not valid" do
expect(parser.errors[:field_19]).to be_present
end
end
end
[
{ field: :field_20, name: "address line 1" },
{ field: :field_22, name: "town or city" },
{ field: :field_24, name: "postcode part 1" },
{ field: :field_25, name: "postcode part 2" },
].each do |data|
describe "##{data[:field]} (#{data[:name]})" do
context "when UPRN present" do
let(:attributes) { setup_section_params.merge({ field_19: "100023336956", data[:field] => nil }) }
it "can be blank" do
expect(parser.errors[data[:field]]).to be_blank
end
end
context "when UPRN not present" do
let(:attributes) { setup_section_params.merge({ field_19: nil, data[:field] => nil }) }
it "cannot be blank" do
expect(parser.errors[data[:field]]).to be_present
end
end
end
end
[
%w[age1_known age1 field_30],
%w[age2_known age2 field_38],
%w[age3_known age3 field_47],
%w[age4_known age4 field_51],
%w[age5_known age5 field_55],
%w[age6_known age6 field_59],
].each do |known, age, field|
describe "##{known} and ##{age}" do
context "when #{field} is blank" do
let(:attributes) { { bulk_upload:, field.to_s => nil } }
it "sets ##{known} 1" do
expect(parser.log.public_send(known)).to be(1)
end
it "sets ##{age} to nil" do
expect(parser.log.public_send(age)).to be_nil
end
end
context "when #{field} is R" do
let(:attributes) { setup_section_params.merge({ field.to_s => "R", field_28: "1", field_45: "5", field_29: "1" }) }
it "sets ##{known} 1" do
expect(parser.log.public_send(known)).to be(1)
end
it "sets ##{age} to nil" do
expect(parser.log.public_send(age)).to be_nil
end
end
context "when #{field} is a number" do
let(:attributes) { setup_section_params.merge({ field.to_s => "50", field_28: "1", field_45: "5", field_29: "1" }) }
it "sets ##{known} to 0" do
expect(parser.log.public_send(known)).to be(0)
end
it "sets ##{age} to given age" do
expect(parser.log.public_send(age)).to be(50)
end
end
context "when #{field} is a non-sensical value" do
let(:attributes) { setup_section_params.merge({ field.to_s => "A", field_28: "1", field_45: "5", field_29: "1" }) }
it "sets ##{known} to 0" do
expect(parser.log.public_send(known)).to be(0)
end
it "sets ##{age} to nil" do
expect(parser.log.public_send(age)).to be_nil
end
end
end
end
describe "#field_36" do # will buyer1 live in property?
context "when not a possible value" do
let(:attributes) { valid_attributes.merge({ field_36: "3" }) }
it "is not valid" do
expect(parser.errors).to include(:field_36)
end
end
end
end
describe "#log" do
describe "#uprn" do
let(:attributes) { setup_section_params.merge({ field_19: "100023336956" }) }
it "is correctly set" do
expect(parser.log.uprn).to eql("100023336956")
end
end
describe "#address_line1" do
let(:attributes) { setup_section_params.merge({ field_20: "some street" }) }
it "is correctly set" do
expect(parser.log.address_line1).to eql("some street")
end
end
describe "#address_line2" do
let(:attributes) { setup_section_params.merge({ field_21: "some other street" }) }
it "is correctly set" do
expect(parser.log.address_line2).to eql("some other street")
end
end
describe "#town_or_city" do
let(:attributes) { setup_section_params.merge({ field_22: "some town" }) }
it "is correctly set" do
expect(parser.log.town_or_city).to eql("some town")
end
end
describe "#county" do
let(:attributes) { setup_section_params.merge({ field_23: "some county" }) }
it "is correctly set" do
expect(parser.log.county).to eql("some county")
end
end
describe "#ethnic_group2" do
let(:attributes) { setup_section_params.merge({ field_40: "1" }) }
it "is correctly set" do
expect(parser.log.ethnic_group2).to be(0)
end
end
describe "#ethnicbuy2" do
let(:attributes) { setup_section_params.merge({ field_40: "1" }) }
it "is correctly set" do
expect(parser.log.ethnicbuy2).to be(1)
end
end
describe "#nationalbuy2" do
let(:attributes) { setup_section_params.merge({ field_41: "18" }) }
it "is correctly set" do
expect(parser.log.nationalbuy2).to be(18)
end
end
describe "#buy2living" do
let(:attributes) { setup_section_params.merge({ field_71: "1" }) }
it "is correctly set" do
expect(parser.log.buy2living).to be(1)
end
end
describe "#prevtenbuy2" do
let(:attributes) { setup_section_params.merge({ field_72: "R" }) }
it "is correctly set" do
expect(parser.log.prevtenbuy2).to be(0)
end
end
describe "#hhregresstill" do
let(:attributes) { setup_section_params.merge({ field_74: "4" }) }
it "is correctly set" do
expect(parser.log.hhregresstill).to be(4)
end
end
describe "#prevshared" do
let(:attributes) { setup_section_params.merge({ field_85: "3" }) }
it "is correctly set" do
expect(parser.log.prevshared).to be(3)
end
end
describe "#staircasesale" do
let(:attributes) { setup_section_params.merge({ field_90: "1" }) }
it "is correctly set" do
expect(parser.log.staircasesale).to be(1)
end
end
describe "#soctenant" do
let(:attributes) { setup_section_params.merge({ field_99: "1" }) }
it "is correctly set" do
expect(parser.log.soctenant).to be(1)
end
end
end
end

spec/support/bulk_upload/sales_log_to_csv.rb (41)

@@ -16,6 +16,15 @@ class BulkUpload::SalesLogToCsv
(row_prefix + to_2022_row).flatten.join(",") + line_ending
end
def to_2023_csv_row(seed: nil)
if seed
row = to_2023_row.shuffle(random: Random.new(seed))
(row_prefix + row).flatten.join(",") + line_ending
else
(row_prefix + to_2023_row).flatten.join(",") + line_ending
end
end
def default_2022_field_numbers
(1..125).to_a
end
@@ -28,6 +37,34 @@ class BulkUpload::SalesLogToCsv
end.flatten.join(",") + line_ending
end
def default_2023_field_numbers_row(seed: nil)
if seed
["Bulk upload field number"] + default_2023_field_numbers.shuffle(random: Random.new(seed))
else
["Bulk upload field number"] + default_2023_field_numbers
end.flatten.join(",") + line_ending
end
def to_2023_row
to_2022_row + [
log.uprn,
log.address_line1,
log.address_line2,
log.town_or_city,
log.county,
nil, # mistake in template. field is ignored
log.ethnic_group2,
log.nationalbuy2,
nil, # mistake in template. field is ignored
log.buy2living,
log.prevtenbuy2,
log.hhregresstill,
log.prevshared,
log.staircasesale,
log.soctenant,
]
end
def to_2022_row
[
log.purchid, # 1
@@ -177,6 +214,10 @@ class BulkUpload::SalesLogToCsv
private
def default_2023_field_numbers
[6, 3, 4, 5, nil, 28, 30, 38, 47, 51, 55, 59, 31, 39, 48, 52, 56, 60, 37, 46, 50, 54, 58, 35, 43, 49, 53, 57, 61, 32, 33, 78, 80, 79, 81, 83, 84, nil, 62, 66, 64, 65, 63, 67, 69, 70, 68, 76, 77, 16, 17, 18, 26, 24, 25, 27, 8, 91, 95, 96, 97, 92, 93, 94, 98, 100, 101, 103, 104, 106, 110, 111, 112, 113, 114, 9, 116, 117, 118, 120, 124, 125, 126, 10, 11, nil, 127, 129, 133, 134, 135, 1, 2, nil, 73, nil, 75, 107, 108, 121, 122, 130, 131, 82, 109, 123, 132, 115, 15, 86, 87, 29, 7, 12, 13, 14, 36, 44, 45, 88, 89, 102, 105, 119, 128, 19, 20, 21, 22, 23, 34, 40, 41, 42, 71, 72, 74, 85, 90, 99]
end
def hhregres
if log.hhregres == 1
log.hhregresstill
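
The seed argument is what lets the specs shuffle the field-number row and the data row into the same random order, which is how the arbitrary-column-ordering case is exercised. A minimal usage sketch, mirroring the csv_parser_spec above (the seed value is arbitrary, but both calls must receive the same one):

```ruby
log = build(:sales_log, :completed, :with_uprn)
csv = BulkUpload::SalesLogToCsv.new(log:)
seed = 0.123

file = Tempfile.new
# Both rows are shuffled with Random.new(seed), so the shuffled field-number
# header still lines up column-for-column with the shuffled data row.
file.write(csv.default_2023_field_numbers_row(seed:))
file.write(csv.to_2023_csv_row(seed:))
file.rewind
```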
