Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
29 changes: 15 additions & 14 deletions lib/folio/client/job_execution_manager.rb
Original file line number Diff line number Diff line change
@@ -1,35 +1,36 @@
# frozen_string_literal: true

class Folio::Client::JobExecutionManager
# Sets up the manager and immediately creates the FOLIO JobExecution so that
# every batch added later via #add_records flows into a single job.
#
# @param folio_client [Folio::Client] client used to talk to FOLIO
# @param job_profile_uuid [String] UUID of the FOLIO job profile to run
# @param total_number_of_records [Integer] total number of records the job will receive
# @param batch_size [Integer] number of records submitted per request
def initialize(folio_client, job_profile_uuid, total_number_of_records, batch_size)
  @folio_client = folio_client
  @job_profile_uuid = job_profile_uuid
  @batch_size = batch_size
  # Create the JobExecution up front; 'MARC' is the record type for this job.
  # NOTE(review): reconstructed from a garbled diff — the create call must live
  # here because total_number_of_records/batch_size are only in scope as params.
  @job_execution = @folio_client.create_job_execution(
    @job_profile_uuid,
    'MARC',
    total_number_of_records,
    batch_size
  )
end

# Adds the given processed records to the JobExecution that we are setting up.
# Each record is forwarded to the JobExecution created in #initialize.
#
# @param processed_records [Array<Hash>] Array of { marc_record: MARC::Record, metadata: Hash }
# @return [Array<Hash>] the processed_records argument (Enumerable#each return value)
def add_records(processed_records)
  processed_records.each do |processed_record|
    # Use the instance-level @job_execution (a bare job_execution local does not exist here).
    @job_execution.add_record(
      processed_record[:marc_record],
      processed_record[:metadata]
    )
  end
end

# Executes a complete FOLIO job with all of the processed records previously added through the #add_records method.
# @return [Folio::Client::JobExecutionSummary] The completed job execution summary
def execute_job
# Start and wait for completion
job_execution.start
job_execution_summary = job_execution.wait_until_complete
@job_execution.start
job_execution_summary = @job_execution.wait_until_complete

Rails.logger.info("Batch completed: #{job_execution_summary.records_processed} records processed")

Expand Down
28 changes: 13 additions & 15 deletions lib/folio_sync/archives_space_to_folio/batch_processor.rb
Original file line number Diff line number Diff line change
Expand Up @@ -20,10 +20,21 @@ def initialize(instance_key)
# Processes records in batches and sends them to FOLIO.
# All batches are executed in a single FOLIO Job Execution, then the job's
# results are post-processed (suppression updates and database record updates).
#
# @param records [ActiveRecord::Relation] Collection of AspaceToFolioRecord objects
def process_records(records)
  # Use the :: prefix to avoid namespace issues.
  # JobExecutionManager#initialize takes (folio_client, job_profile_uuid,
  # total_number_of_records, batch_size) — the total record count was missing
  # from the original call, which would raise ArgumentError.
  @job_manager = ::Folio::Client::JobExecutionManager.new(@folio_client, job_profile_uuid, records.count, batch_size)

  records.in_batches(of: batch_size) do |batch|
    process_batch(batch)
  end

  # All batches are executed in a single FOLIO Job Execution.
  # The manager's public method is #execute_job, not #execute.
  job_execution_summary = @job_manager.execute_job

  # Process the results (suppression updates and database record updates)
  result_processor = FolioSync::ArchivesSpaceToFolio::JobResultProcessor.new(@folio_reader, @folio_writer, @instance_key)
  result_processor.process_results(job_execution_summary)
  @syncing_errors.concat(result_processor.processing_errors)
  @syncing_errors.concat(@record_processor.processing_errors)
end

Expand All @@ -41,7 +52,8 @@ def process_batch(records_batch)

return if processed_records.empty?

submit_batch_to_folio(processed_records)
# Process the results (suppression updates and database record updates)
job_manager.add_records(processed_records)
rescue StandardError => e
error = FolioSync::Errors::SyncingError.new(
message: "Failed to process a batch of #{records_batch.count} records: #{e.message}"
Expand All @@ -50,20 +62,6 @@ def process_batch(records_batch)
Rails.logger.error("Error processing batch: #{e.message}")
end

def submit_batch_to_folio(processed_records)
# Execute the FOLIO job
# Use the :: prefix to avoid namespace issues
job_manager = ::Folio::Client::JobExecutionManager.new(@folio_client, job_profile_uuid, batch_size)
job_execution_summary = job_manager.execute_job(processed_records)

# Process the results (suppression updates and database record updates)
result_processor = FolioSync::ArchivesSpaceToFolio::JobResultProcessor.new(@folio_reader, @folio_writer, @instance_key)
result_processor.process_results(job_execution_summary)

job_result_errors = result_processor.processing_errors
@syncing_errors.concat(job_result_errors)
end

# Batch size used when submitting records to FOLIO.
# Reads the configured aspace_to_folio batch size, falling back to
# DEFAULT_BATCH_SIZE when no value is configured.
def batch_size
  configured = Rails.configuration.folio_sync.dig(:aspace_to_folio, :batch_size)
  configured || DEFAULT_BATCH_SIZE
end
Expand Down
Loading