Browse Source
* Add sales csv parser * Create log creator and update row parser for sales * Add validations * Add status_cache to sales logs * Parse completed log * Add basic validations to the row parser * Fix details known and proplen mapping * Add setup section errors * Update sales log validator * tests * Add sales resume and summary pages * send the correct emails
kosiakkatrina
2 years ago
committed by
GitHub
25 changed files with 2337 additions and 81 deletions
@ -0,0 +1,42 @@ |
|||||||
|
# Drives the "resume" flow after a sales bulk upload: the user chooses how to
# fix errors (fix-choice page) and then confirms the upload (confirm page).
class BulkUploadSalesResumeController < ApplicationController
  before_action :authenticate_user!
  before_action :find_bulk_upload

  # Entry point: send the user to the first page of the flow.
  def start
    redirect_to page_bulk_upload_sales_resume_path(@bulk_upload, page: "fix-choice")
  end

  def show
    render form.view_path
  end

  def update
    if form.valid? && form.save!
      redirect_to form.next_path
    else
      render form.view_path
    end
  end

private

  # Scoped through current_user so users can only resume their own uploads.
  def find_bulk_upload
    @bulk_upload = current_user.bulk_uploads.find(params[:id])
  end

  # Maps the :page segment of the URL to the form object backing that page.
  def form
    @form ||=
      case params[:page]
      when "fix-choice"
        Forms::BulkUploadSalesResume::FixChoice.new(form_params.merge(bulk_upload: @bulk_upload))
      when "confirm"
        Forms::BulkUploadSalesResume::Confirm.new(form_params.merge(bulk_upload: @bulk_upload))
      else
        raise "invalid form"
      end
  end

  def form_params
    params.fetch(:form, {}).permit(:choice)
  end
end
@ -0,0 +1,30 @@ |
|||||||
|
module Forms
  module BulkUploadSalesResume
    # Final step of the resume flow: the user confirms they want the
    # bulk-uploaded logs created despite remaining errors.
    class Confirm
      include ActiveModel::Model
      include ActiveModel::Attributes
      include Rails.application.routes.url_helpers

      attribute :bulk_upload

      def view_path
        "bulk_upload_sales_resume/confirm"
      end

      def back_path
        page_bulk_upload_sales_resume_path(bulk_upload, page: "fix-choice")
      end

      def next_path
        resume_bulk_upload_sales_result_path(bulk_upload)
      end

      # Approving hands the upload to the processor, which creates the logs.
      # Always reports success so the controller redirects onwards.
      def save!
        BulkUpload::Processor.new(bulk_upload:).approve
        true
      end
    end
  end
end
@ -0,0 +1,53 @@ |
|||||||
|
module Forms
  module BulkUploadSalesResume
    # First step of the resume flow: choose between fixing errors on the
    # CORE site (create logs now) or fixing the CSV and re-uploading.
    class FixChoice
      include ActiveModel::Model
      include ActiveModel::Attributes
      include Rails.application.routes.url_helpers

      CHOICES = %w[create-fix-inline upload-again].freeze

      attribute :bulk_upload
      attribute :choice, :string

      validates :choice, presence: true, inclusion: { in: CHOICES }

      # Radio button options rendered by the view (id + label pairs).
      def options
        [
          OpenStruct.new(id: "create-fix-inline", name: "Upload these logs and fix errors on CORE site"),
          OpenStruct.new(id: "upload-again", name: "Fix errors in the CSV and re-upload"),
        ]
      end

      def view_path
        "bulk_upload_sales_resume/fix_choice"
      end

      def next_path
        case choice
        when "create-fix-inline"
          page_bulk_upload_sales_resume_path(bulk_upload, page: "confirm")
        when "upload-again"
          if above_error_threshold?
            summary_bulk_upload_sales_result_path(bulk_upload)
          else
            bulk_upload_sales_result_path(bulk_upload)
          end
        else
          raise "invalid choice"
        end
      end

      # Recommendation text shown above the radio buttons, based on whether
      # the error summary component considers the error count significant.
      def recommendation
        if above_error_threshold?
          "For this many errors we recommend to fix errors in the CSV and re-upload as you may be able to edit many fields at once in a CSV."
        else
          "For this many errors we recommend to upload logs and fix errors on site as you can easily see the questions and select the appropriate answer."
        end
      end

      # Nothing to persist for this step.
      def save!
        true
      end

    private

      def above_error_threshold?
        BulkUploadErrorSummaryTableComponent.new(bulk_upload:).errors?
      end
    end
  end
end
@ -0,0 +1,70 @@ |
|||||||
|
# Creates pending SalesLog records from a parsed sales bulk upload file.
class BulkUpload::Sales::LogCreator
  attr_reader :bulk_upload, :path

  def initialize(bulk_upload:, path:)
    @bulk_upload = bulk_upload
    @path = path
  end

  # Builds one log per non-blank CSV row. Logs are saved as "pending" with
  # their eventual status remembered in status_cache.
  def call
    row_parsers.each do |row_parser|
      # Run validations for their side effects: errors must be populated
      # before invalid non-setup fields can be blanked.
      row_parser.valid?

      next if row_parser.blank_row?

      log = row_parser.log
      log.blank_invalid_non_setup_fields!
      log.bulk_upload = bulk_upload
      log.skip_update_status = true
      log.status = "pending"

      log.status_cache = log.calculate_status

      begin
        log.save!
      rescue StandardError => e
        # Best effort: one bad row must not abort the rest of the upload.
        Sentry.capture_exception(e)
      end
    end
  end

private

  # Picks the parser matching the upload's collection year.
  def csv_parser
    @csv_parser ||=
      case bulk_upload.year
      when 2022
        BulkUpload::Sales::Year2022::CsvParser.new(path:)
      when 2023
        BulkUpload::Sales::Year2023::CsvParser.new(path:)
      else
        raise "csv parser not found"
      end
  end

  def row_offset
    csv_parser.row_offset
  end

  def col_offset
    csv_parser.col_offset
  end

  # Row parsers from the CSV, each tagged with the owning bulk upload.
  def row_parsers
    @row_parsers ||= csv_parser.row_parsers.each do |row_parser|
      row_parser.bulk_upload = bulk_upload
    end
  end

  def body_rows
    csv_parser.body_rows
  end

  def rows
    csv_parser.rows
  end
end
@ -0,0 +1,70 @@ |
|||||||
|
require "csv"

# Parses a 2022/23 sales bulk upload CSV. Files may be uploaded either with
# the template's preamble rows/label column ("with headers") or as bare data.
class BulkUpload::Sales::Year2022::CsvParser
  MIN_COLUMNS = 125
  MAX_COLUMNS = 126

  attr_reader :path

  def initialize(path:)
    @path = path
  end

  # Template files carry 5 preamble rows before the data starts.
  def row_offset
    with_headers? ? 5 : 0
  end

  # Template files carry one leading label column before the data.
  def col_offset
    with_headers? ? 1 : 0
  end

  # Spreadsheet-style column letters A..DV (enough for MAX_COLUMNS columns).
  def cols
    @cols ||= ("A".."DV").to_a
  end

  # One RowParser per data row, keyed field_1..field_125.
  def row_parsers
    @row_parsers ||= body_rows.map do |row|
      stripped_row = row[col_offset..]
      # Reuse the single headers definition rather than redeclaring the
      # field range here — the two copies had to be kept in sync by hand.
      hash = headers.zip(stripped_row).to_h

      BulkUpload::Sales::Year2022::RowParser.new(hash)
    end
  end

  def body_rows
    rows[row_offset..]
  end

  def rows
    @rows ||= CSV.parse(normalised_string, row_sep:)
  end

  # Translates a field name ("field_7") into its spreadsheet column letter,
  # accounting for the label column when headers are present.
  def column_for_field(field)
    cols[headers.find_index(field) + col_offset]
  end

private

  def headers
    @headers ||= ("field_1".."field_125").to_a
  end

  # Heuristic: the template's first column contains a "field number" label.
  def with_headers?
    rows.map { |r| r[0] }.any? { |cell| cell&.match?(/field number/i) }
  end

  def row_sep
    "\n"
  end

  # Reads the file (stripping any BOM), normalises Windows line endings and
  # scrubs invalid byte sequences so CSV.parse cannot choke on them.
  def normalised_string
    return @normalised_string if @normalised_string

    @normalised_string = File.read(path, encoding: "bom|utf-8")
    @normalised_string.gsub!("\r\n", "\n")
    @normalised_string.scrub!("")

    @normalised_string
  end
end
@ -0,0 +1,11 @@ |
|||||||
|
<%# Shown when every log that had bulk-upload errors has been completed. %>
<div class="govuk-grid-row">
  <div class="govuk-grid-column-two-thirds">
    <h1 class="govuk-heading-xl">There are no more logs that need updating</h1>
  </div>
</div>

<p class="govuk-body-l">
  You’ve completed all the logs that had errors from your bulk upload.
</p>

<%= govuk_button_link_to "Back to all logs", sales_logs_path, button: true %>
@ -0,0 +1,30 @@ |
|||||||
|
<%# Error summary page shown when no logs could be created from the upload:
    tabs with a summary table and the full per-row error report. %>
<div class="govuk-grid-row">
  <div class="govuk-grid-column-two-thirds">
    <span class="govuk-caption-l">Bulk upload for sales (<%= @bulk_upload.year_combo %>)</span>
    <h1 class="govuk-heading-l">Fix <%= pluralize(@bulk_upload.bulk_upload_errors.count, "error") %> and upload file again</h1>

    <p class="govuk-body-l">
      We could not create logs from your bulk upload. Below is a list of everything that you need to fix your spreadsheet. You can download the <%= govuk_link_to "specification", Forms::BulkUploadSales::PrepareYourFile.new(year: @bulk_upload.year).specification_path, target: "_blank" %> to help you fix the cells in your CSV file.
    </p>

    <p class="govuk-body-l">
      Filename: <%= @bulk_upload.filename %>
    </p>
  </div>
</div>

<div class="govuk-grid-row">
  <%= govuk_tabs(title: "Error reports") do |c| %>
    <% c.with_tab(label: "Summary") do %>
      <%= render BulkUploadErrorSummaryTableComponent.new(bulk_upload: @bulk_upload) %>
    <% end %>

    <% c.with_tab(label: "Full error report") do %>
      <% @bulk_upload.bulk_upload_errors.order_by_cell.group_by(&:row).each do |_row, errors_for_row| %>
        <%= render BulkUploadErrorRowComponent.new(bulk_upload_errors: errors_for_row) %>
      <% end %>
    <% end %>
  <% end %>
</div>

<%= govuk_button_link_to "Upload your file again", start_bulk_upload_sales_logs_path %>
@ -0,0 +1,22 @@ |
|||||||
|
<%# Confirm page of the resume flow: warns the user that created logs cannot
    be deleted before they approve the upload. Submits to the same page. %>
<% content_for :before_content do %>
  <%= govuk_back_link href: @form.back_path %>
<% end %>

<div class="govuk-grid-row">
  <div class="govuk-grid-column-two-thirds">
    <span class="govuk-caption-l">Bulk upload for sales (<%= @bulk_upload.year_combo %>)</span>
    <h1 class="govuk-heading-l">Are you sure you want to upload all logs from this bulk upload?</h1>

    <p class="govuk-body">There are <%= pluralize(@bulk_upload.logs.count, "log") %> in this bulk upload with <%= pluralize(@bulk_upload.bulk_upload_errors.count, "error") %> that still need to be fixed after upload.</p>

    <%= govuk_warning_text(icon_fallback_text: "Danger") do %>
      You can not delete logs once you create them
    <% end %>

    <%= form_with model: @form, scope: :form, url: page_bulk_upload_sales_resume_path(@bulk_upload, page: "confirm"), method: :patch do |f| %>
      <%= f.govuk_submit %>

      <%= govuk_button_link_to "Cancel", @form.back_path, secondary: true %>
    <% end %>
  </div>
</div>
@ -0,0 +1,36 @@ |
|||||||
|
<%# Fix-choice page of the resume flow: radio buttons for fixing errors on
    the CORE site vs. fixing the CSV, plus a recommendation from the form. %>
<div class="govuk-grid-row">
  <div class="govuk-grid-column-two-thirds">
    <%= form_with model: @form, scope: :form, url: page_bulk_upload_sales_resume_path(@bulk_upload, page: "fix-choice"), method: :patch do |f| %>
      <%= f.govuk_error_summary %>

      <span class="govuk-caption-l">Bulk upload for sales (<%= @bulk_upload.year_combo %>)</span>
      <h1 class="govuk-heading-l">How would you like to fix <%= pluralize(@bulk_upload.bulk_upload_errors.count, "error") %>?</h1>

      <div class="govuk-body-l">
        <%= @bulk_upload.filename %>
      </div>

      <div class="govuk-body">
        <%= @form.recommendation %>
      </div>

      <%= govuk_details(summary_text: "How to choose between fixing errors on the CORE site or in the CSV") do %>
        <p class="govuk-body">When it comes to fixing errors, there are pros and cons to doing it on a CSV versus doing it on a website.</p>

        <p class="govuk-body">Fixing errors on a CSV file can be beneficial because it allows you to easily make changes to multiple records at once, and you can use tools like Excel to quickly identify and correct errors. However, if the CSV file is not properly formatted, it can be difficult to identify which records contain errors.</p>

        <p class="govuk-body">Fixing errors on a website can be convenient because you can see the data in context and make changes in real-time. However, this approach can be time-consuming if you need to make changes to multiple records, and it may be more difficult to identify errors in a large dataset.</p>

        <p class="govuk-body">Ultimately, the best approach will depend on the specific situation and the nature of the errors that need to be fixed.</p>
      <% end %>

      <%= f.govuk_collection_radio_buttons :choice,
                                           @form.options,
                                           :id,
                                           :name,
                                           legend: { hidden: true } %>

      <%= f.govuk_submit %>
    <% end %>
  </div>
</div>
@ -0,0 +1,5 @@ |
|||||||
|
# Adds a cached status column to sales logs so bulk-uploaded logs can be
# created as "pending" while remembering the status they will take on approval.
class AddStatusCache < ActiveRecord::Migration[7.0]
  def change
    add_column :sales_logs, :status_cache, :integer, null: false, default: 0
  end
end
|
|
@ -0,0 +1,99 @@ |
|||||||
|
require "rails_helper"

RSpec.describe BulkUpload::Sales::LogCreator do
  subject(:service) { described_class.new(bulk_upload:, path:) }

  let(:owning_org) { create(:organisation, old_visible_id: 123) }
  let(:user) { create(:user, organisation: owning_org) }

  let(:bulk_upload) { create(:bulk_upload, :sales, user:) }
  let(:path) { file_fixture("completed_2022_23_sales_bulk_upload.csv") }

  describe "#call" do
    context "when a valid csv with new log" do
      it "creates a new log" do
        expect { service.call }.to change(SalesLog, :count)
      end

      it "creates a log with pending status" do
        service.call
        expect(SalesLog.last.status).to eql("pending")
      end

      it "associates log with bulk upload" do
        service.call

        log = SalesLog.last
        expect(log.bulk_upload).to eql(bulk_upload)
        expect(bulk_upload.sales_logs).to include(log)
      end
    end

    context "when a valid csv with several blank rows" do
      let(:file) { Tempfile.new }
      let(:path) { file.path }
      let(:log) { SalesLog.new }

      before do
        # Three rows serialised from an empty log, i.e. three blank rows.
        file.write(BulkUpload::LogToCsv.new(log:, col_offset: 0).to_2022_sales_csv_row)
        file.write(BulkUpload::LogToCsv.new(log:, col_offset: 0).to_2022_sales_csv_row)
        file.write(BulkUpload::LogToCsv.new(log:, col_offset: 0).to_2022_sales_csv_row)
        file.rewind
      end

      it "ignores them and does not create the logs" do
        expect { service.call }.not_to change(SalesLog, :count)
      end
    end

    context "when a valid csv with row with one invalid non setup field" do
      let(:file) { Tempfile.new }
      let(:path) { file.path }
      let(:log) do
        build(
          :sales_log,
          :completed,
          age1: 5,
          owning_organisation: owning_org,
        )
      end

      before do
        file.write(BulkUpload::LogToCsv.new(log:, col_offset: 0).to_2022_sales_csv_row)
        file.rewind
      end

      it "creates the log" do
        expect { service.call }.to change(SalesLog, :count).by(1)
      end

      it "blanks invalid field" do
        service.call

        record = SalesLog.last
        expect(record.age1).to be_blank
      end
    end

    context "when pre-creating logs" do
      # NOTE: this context previously redefined the outer subject with an
      # identical definition; the duplicate has been removed.
      it "creates a new log" do
        expect { service.call }.to change(SalesLog, :count)
      end

      it "creates a log with correct states" do
        service.call

        last_log = SalesLog.last

        expect(last_log.status).to eql("pending")
        expect(last_log.status_cache).to eql("completed")
      end
    end

    context "when valid csv with existing log" do
      xit "what should happen?"
    end
  end
end
@ -0,0 +1,97 @@ |
|||||||
|
require "rails_helper"

RSpec.describe BulkUpload::Sales::Year2022::CsvParser do
  subject(:service) { described_class.new(path:) }

  let(:path) { file_fixture("completed_2022_23_sales_bulk_upload.csv") }

  context "when parsing csv with headers" do
    it "returns correct offsets" do
      expect(service.row_offset).to eq(5)
      expect(service.col_offset).to eq(1)
    end

    it "parses csv correctly" do
      expect(service.row_parsers[0].field_7.to_i).to eq(32)
    end
  end

  context "when parsing csv without headers" do
    let(:file) { Tempfile.new }
    let(:path) { file.path }
    let(:log) { build(:sales_log, :completed) }

    before do
      file.write(BulkUpload::LogToCsv.new(log:, col_offset: 0).to_2022_sales_csv_row)
      file.rewind
    end

    it "returns correct offsets" do
      expect(service.row_offset).to eq(0)
      expect(service.col_offset).to eq(0)
    end

    it "parses csv correctly" do
      expect(service.row_parsers[0].field_7.to_i).to eql(log.age1)
    end
  end

  context "when parsing with BOM aka byte order mark" do
    let(:file) { Tempfile.new }
    let(:path) { file.path }
    let(:log) { build(:sales_log, :completed) }
    let(:bom) { "\uFEFF" }

    before do
      file.write(bom)
      file.write(BulkUpload::LogToCsv.new(log:, col_offset: 0).to_2022_sales_csv_row)
      file.close
    end

    it "parses csv correctly" do
      expect(service.row_parsers[0].field_7.to_i).to eql(log.age1)
    end
  end

  context "when an invalid byte sequence" do
    let(:file) { Tempfile.new }
    let(:path) { file.path }
    let(:log) { build(:sales_log, :completed) }
    let(:invalid_sequence) { "\x81" }

    before do
      file.write(invalid_sequence)
      file.write(BulkUpload::LogToCsv.new(log:, col_offset: 0).to_2022_sales_csv_row)
      file.close
    end

    it "parses csv correctly" do
      expect(service.row_parsers[0].field_7.to_i).to eql(log.age1)
    end
  end

  describe "#column_for_field", aggregate_failures: true do
    context "when headers present" do
      # Fixture file has the label column, so letters shift right by one.
      it "returns correct column" do
        expect(service.column_for_field("field_1")).to eql("B")
        expect(service.column_for_field("field_125")).to eql("DV")
      end
    end

    context "when no headers" do
      let(:file) { Tempfile.new }
      let(:path) { file.path }
      let(:log) { build(:sales_log, :completed) }

      before do
        file.write(BulkUpload::LogToCsv.new(log:, col_offset: 0).to_2022_sales_csv_row)
        file.rewind
      end

      it "returns correct column" do
        expect(service.column_for_field("field_1")).to eql("A")
        expect(service.column_for_field("field_125")).to eql("DU")
      end
    end
  end
end
Loading…
Reference in new issue