Commit a6750af

Revert "WIP add extra columns to the spreadsheet and allow creation o…
Browse files Browse the repository at this point in the history
…f new subrecords"

This reverts commit 475a1d8.
payten committed Apr 1, 2021
1 parent 475a1d8 commit a6750af
Showing 3 changed files with 16 additions and 73 deletions.
backend/model/spreadsheet_builder.rb (18 changes: 7 additions & 11 deletions)
@@ -118,13 +118,10 @@ def calculate_max_subrecords
 
       DB.open do |db|
         SUBRECORDS_OF_INTEREST.each do |subrecord|
-          max = db[subrecord]
-                  .filter(:archival_object_id => @ao_ids)
-                  .group_and_count(:archival_object_id)
-                  .max(:count) || 0
-
-          # Notes, Extent: At least 3 more than the max
-          results[subrecord] = max + 3
+          results[subrecord] = db[subrecord]
+                                 .filter(:archival_object_id => @ao_ids)
+                                 .group_and_count(:archival_object_id)
+                                 .max(:count) || 0
         end
 
         MULTIPART_NOTES_OF_INTEREST.each do |note_type|
@@ -140,10 +137,7 @@
 
           pp query.sql
 
-          max = (query.first || {})[:max] || 0
-
-          # Notes: At least 2 of each type
-          results[note_type] = [max, 2].max
+          results[note_type] = (query.first || {})[:max] || 0
         end
       end
 
@@ -264,13 +258,15 @@ def dataset_iterator(&block)
             current_row << ColumnAndValue.new(note_content, column)
           else
             current_row << ColumnAndValue.new(nil, column)
+            locked_column_indexes << current_row.length - 1
           end
         else
           subrecord_data = subrecord_datasets.fetch(column.jsonmodel).fetch(row[:id], []).fetch(column.index, nil)
           if subrecord_data
             current_row << ColumnAndValue.new(subrecord_data.fetch(column.name, nil), column)
           else
             current_row << ColumnAndValue.new(nil, column)
+            locked_column_indexes << current_row.length - 1
           end
         end
       end
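For context, the restored calculate_max_subrecords sizes the spreadsheet from the largest number of existing subrecords per archival object; the reverted WIP code had padded that figure (at least three extra for subrecords, at least two per note type) to leave room for creating new rows. The Sequel pattern it relies on, group_and_count followed by max, is sketched below against a throwaway in-memory database; the table and data are illustrative, not the ArchivesSpace schema.

    require 'sequel'

    db = Sequel.sqlite  # in-memory SQLite; assumes the sqlite3 gem is available

    db.create_table(:extent) do
      primary_key :id
      Integer :archival_object_id
    end

    # Two extent subrecords for archival object 1, one for archival object 2.
    db[:extent].import([:archival_object_id], [[1], [1], [2]])

    ao_ids = [1, 2]

    # Count subrecords per archival object, then take the largest count.
    max = db[:extent]
            .filter(:archival_object_id => ao_ids)
            .group_and_count(:archival_object_id)
            .max(:count) || 0

    puts max  # => 2

group_and_count adds a count column aliased as :count, and calling max on the grouped dataset wraps it in a subselect, so the result is the largest per-object count (or 0 when nothing matches).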
backend/model/spreadsheet_bulk_update_job.rb (2 changes: 0 additions & 2 deletions)
@@ -52,8 +52,6 @@ def run
       job.write_output("\n\nPlease correct any issues with your import spreadsheet and retry.\n")
 
       job.finish!(:failed)
-
-      raise e
     rescue => e
       Log.exception(e)
       job.write_output("Unexpected failure while running job. Error: #{e}")
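The only change in this file is dropping the re-raise that followed job.finish!(:failed), so a spreadsheet validation failure is still recorded on the job but no longer propagates to the caller. A minimal sketch of that control-flow difference in plain Ruby (FakeJob is a stand-in, not the ArchivesSpace job API):

    # Rescue an error, record the failure, and optionally re-raise it.
    class FakeJob
      def finish!(status)
        puts "job finished with status: #{status}"
      end
    end

    def run_job(reraise:)
      job = FakeJob.new
      begin
        raise ArgumentError, 'bad spreadsheet row'
      rescue ArgumentError => e
        job.finish!(:failed)
        raise e if reraise  # with the re-raise, the caller sees the error too
      end
    end

    run_job(reraise: false)   # failure recorded, method returns normally

    begin
      run_job(reraise: true)  # failure recorded, then the error propagates
    rescue ArgumentError => e
      puts "caller saw: #{e.message}"
    end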
backend/model/spreadsheet_bulk_updater.rb (69 changes: 9 additions & 60 deletions)
@@ -4,13 +4,6 @@ class SpreadsheetBulkUpdater
 
   BATCH_SIZE = 128
 
-  SUBRECORD_DEFAULTS = {
-    :date => {
-      'date_type' => 'inclusive',
-      'label' => 'creation',
-    },
-  }
-
   def self.run(filename, job)
     check_sheet(filename)
     errors = []
@@ -32,9 +25,6 @@
         row = to_process.fetch(ao.id)
         last_column = nil
 
-        subrecord_by_index = {}
-        all_text_subnotes_by_type = {}
-
         begin
           row.values.each do |path, value|
             column = column_by_path.fetch(path)
@@ -62,71 +52,30 @@
                 end
               end
             elsif column.jsonmodel == :note
-              unless all_text_subnotes_by_type.has_key?(column.name)
-                all_text_subnotes = ao_json.notes
-                                      .select{|note| note['jsonmodel_type'] == 'note_multipart' && note['type'] == column.name.to_s}
-                                      .map{|note| note['subnotes']}
-                                      .flatten
-                                      .select{|subnote| subnote['jsonmodel_type'] == 'note_text'}
-
-                all_text_subnotes_by_type[column.name] = all_text_subnotes
-              end
-
-              clean_value = column.sanitise_incoming_value(value)
+              all_text_subnotes = ao_json.notes
+                                    .select{|note| note['jsonmodel_type'] == 'note_multipart' && note['type'] == column.name.to_s}
+                                    .map{|note| note['subnotes']}
+                                    .flatten
+                                    .select{|subnote| subnote['jsonmodel_type'] == 'note_text'}
 
-              if (subnote_to_update = all_text_subnotes_by_type[column.name].fetch(column.index, nil))
+              if (subnote_to_update = all_text_subnotes.fetch(column.index, nil))
+                clean_value = column.sanitise_incoming_value(value)
                 if subnote_to_update['content'] != clean_value
                   record_changed = true
                   subnote_to_update['content'] = clean_value
                 end
-              elsif clean_value != nil && clean_value != ''
-                record_changed = true
-                sub_note = {
-                  'jsonmodel_type' => 'note_text',
-                  'content' => clean_value
-                }.merge(SUBRECORD_DEFAULTS.fetch(:note_text, {}))
-
-                ao_json.notes << {
-                  'jsonmodel_type' => 'note_multipart',
-                  'type' => column.name.to_s,
-                  'subnotes' => [sub_note],
-                }.merge(SUBRECORD_DEFAULTS.fetch(column.jsonmodel, {}))
-
-                all_text_subnotes_by_type[column.name] << sub_note
-              else
-                # FIXME maybe a delete?
               end
             else
-              clean_value = column.sanitise_incoming_value(value)
-
-              subrecord_by_index[column.path_prefix] ||= Array(ao_json[column.path_prefix]).each_with_index.map{|subrecord, index| [index, subrecord]}.to_h
-
-              if (subrecord_to_update = subrecord_by_index.fetch(column.path_prefix).fetch(column.index, nil))
+              if (subrecord_to_update = Array(ao_json[column.path_prefix]).fetch(column.index, nil))
+                clean_value = column.sanitise_incoming_value(value)
                 if subrecord_to_update[column.name.to_s] != clean_value
                   record_changed = true
                   subrecord_to_update[column.name.to_s] = clean_value
                 end
-              elsif clean_value != nil && clean_value != ''
-                record_changed = true
-                subrecord_by_index.fetch(column.path_prefix)[column.index] = {
-                  'jsonmodel_type' => column.jsonmodel.to_s,
-                  column.name.to_s => clean_value,
-                }.merge(SUBRECORD_DEFAULTS.fetch(column.jsonmodel, {}))
-              else
-                # FIXME maybe a delete?
               end
             end
           end
 
-          # apply subrecords to the json, drop nils
-          subrecord_by_index.keys.each do |path|
-            ao_json[path] = []
-            subrecord_by_index[path].each do |_, subrecord|
-              next if subrecord.nil?
-              ao_json[path] << subrecord
-            end
-          end
-
          if record_changed
            ao_json['position'] = nil
            ao.update_from_json(ao_json)
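Most of this file's revert removes the WIP machinery for creating brand-new subrecords and notes: the SUBRECORD_DEFAULTS template, the subrecord_by_index and all_text_subnotes_by_type bookkeeping, and the final "apply subrecords to the json" pass. What remains only edits subrecords that already exist at the column's index. A minimal sketch of that restored update-in-place pattern, using plain hashes with illustrative field names rather than real ArchivesSpace JSONModel records:

    ao_json = {
      'dates' => [
        { 'jsonmodel_type' => 'date', 'begin' => '1990' }
      ]
    }

    path_prefix = 'dates'     # stands in for column.path_prefix
    index       = 0           # stands in for column.index
    name        = 'begin'     # stands in for column.name
    clean_value = '1991'      # stands in for column.sanitise_incoming_value(value)
    record_changed = false

    # Only touch a subrecord that is already present at this index;
    # a missing index falls through and no new subrecord is created.
    if (subrecord_to_update = Array(ao_json[path_prefix]).fetch(index, nil))
      if subrecord_to_update[name] != clean_value
        record_changed = true
        subrecord_to_update[name] = clean_value
      end
    end

    p record_changed       # => true
    p ao_json['dates'][0]  # => {"jsonmodel_type"=>"date", "begin"=>"1991"}

Because Array() returns the backing array itself and fetch hands back the hash inside it, mutating subrecord_to_update updates ao_json directly, which is why the reverted reassembly pass is no longer needed.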
