Skip to content

Commit

Permalink
97199 cst add reference id to evidence submissions table when uploading document (#19552)
Browse files Browse the repository at this point in the history

* working files may be in broken state

* finished unit testing for DocumentUpload

* added testing around raising errors by document_upload, and cleaned up comments

* removed unnecessary comment from worker_service.rb

* moved EvidenceSubmission record creation to DocumentUpload, and returned to using perform_async instead of perform_in

* added blank line at the end of constants.rb

* Updated incorrect error for when a claim id is not present
  • Loading branch information
samcoforma authored Nov 26, 2024
1 parent 2a48a9c commit 240954d
Show file tree
Hide file tree
Showing 7 changed files with 189 additions and 58 deletions.
82 changes: 67 additions & 15 deletions app/sidekiq/lighthouse/document_upload.rb
Original file line number Diff line number Diff line change
Expand Up @@ -3,10 +3,13 @@
require 'ddtrace'
require 'timeout'
require 'lighthouse/benefits_documents/worker_service'
require 'lighthouse/benefits_documents/constants'

class Lighthouse::DocumentUpload
include Sidekiq::Job

attr_accessor :user_icn, :document_hash

FILENAME_EXTENSION_MATCHER = /\.\w*$/
OBFUSCATED_CHARACTER_MATCHER = /[a-zA-Z\d]/

Expand Down Expand Up @@ -77,28 +80,77 @@ def self.notify_client
VaNotify::Service.new(NOTIFY_SETTINGS.api_key)
end

def perform(user_icn, document_hash)
client = BenefitsDocuments::WorkerService.new
document, file_body, uploader = nil

Datadog::Tracing.trace('Config/Initialize Upload Document') do
Sentry.set_tags(source: 'documents-upload')
document = LighthouseDocument.new document_hash
def perform(user_icn, document_hash, user_account_uuid, claim_id, tracked_item_id)
@user_icn = user_icn
@document_hash = document_hash

raise Common::Exceptions::ValidationErrors, document_data unless document.valid?
evidence_submission = record_evidence_submission(claim_id, jid, tracked_item_id, user_account_uuid)
initialize_upload_document

uploader = LighthouseDocumentUploader.new(user_icn, document.uploader_ids)
uploader.retrieve_from_store!(document.file_name)
end
Datadog::Tracing.trace('Sidekiq read_for_upload') do
file_body = uploader.read_for_upload
end
Datadog::Tracing.trace('Sidekiq Upload Document') do |span|
span.set_tag('Document File Size', file_body.size)
client.upload_document(file_body, document)
response = client.upload_document(file_body, document) # returns upload response which includes requestId
request_successful = response.dig(:data, :success)
if request_successful
request_id = response.dig(:data, :requestId)
evidence_submission.update(request_id:)
else
raise StandardError
end
end
Datadog::Tracing.trace('Remove Upload Document') do
uploader.remove!
end
end

private

# Prepares the upload inside a Datadog trace span: tags Sentry so failures
# are attributed to the documents-upload source, validates the parsed
# document, and pulls the previously stored file back out of the uploader's
# store so its bytes can be read for upload.
#
# Raises Common::Exceptions::ValidationErrors (via validate_document!) when
# the document payload fails model validation.
def initialize_upload_document
  Datadog::Tracing.trace('Config/Initialize Upload Document') do
    Sentry.set_tags(source: 'documents-upload')
    validate_document!
    uploader.retrieve_from_store!(document.file_name)
  end
end

# Ensures the parsed LighthouseDocument passes its model validations before
# any upload work begins; invalid documents surface as ValidationErrors.
def validate_document!
  return if document.valid?

  raise Common::Exceptions::ValidationErrors, document
end

# Lazily builds and caches the Lighthouse worker-service client used to
# push the document upstream.
def client
  @client = BenefitsDocuments::WorkerService.new if @client.nil?
  @client
end

# Lazily materializes the LighthouseDocument parsed from the job's payload
# hash; built at most once per job run.
def document
  @document = LighthouseDocument.new(document_hash) if @document.nil?
  @document
end

# Lazily constructs the uploader scoped to this veteran's ICN and the
# document's uploader ids; built at most once per job run.
def uploader
  @uploader = LighthouseDocumentUploader.new(user_icn, document.uploader_ids) if @uploader.nil?
  @uploader
end

# Reads the stored file's bytes inside a Datadog trace span and returns
# them (the trace block's value is the value of read_for_upload).
def perform_initial_file_read
  Datadog::Tracing.trace('Sidekiq read_for_upload') do
    uploader.read_for_upload
  end
end

# Memoized raw file contents; the (traced) read happens at most once.
def file_body
  @file_body = perform_initial_file_read if @file_body.nil?
  @file_body
end

# Finds or creates the EvidenceSubmission row that tracks this upload job.
#
# claim_id          - id of the claim the evidence belongs to
# job_id            - the Sidekiq jid of this run
# tracked_item_id   - tracked item id(s) the document satisfies (may be nil)
# user_account_uuid - uuid of the veteran's UserAccount; raises
#                     ActiveRecord::RecordNotFound when no such account exists
#
# Returns the persisted EvidenceSubmission with a PENDING upload_status.
def record_evidence_submission(claim_id, job_id, tracked_item_id, user_account_uuid)
  user_account = UserAccount.find(user_account_uuid)
  job_class = self.class.to_s
  upload_status = BenefitsDocuments::Constants::UPLOAD_STATUS[:PENDING]

  evidence_submission = EvidenceSubmission.find_or_create_by(claim_id:,
                                                             tracked_item_id:,
                                                             job_id:,
                                                             job_class:,
                                                             upload_status:) do |submission|
    # The user_account FK is NOT NULL (see create_evidence_submissions
    # migration), so it must be present before the create-path INSERT;
    # assigning it only after find_or_create_by relied on a soft validation
    # failure followed by the save! below.
    submission.user_account = user_account
  end
  # Keep the original behavior for pre-existing rows: (re)attach the account
  # and persist.
  evidence_submission.user_account = user_account
  evidence_submission.save!
  evidence_submission
end
end
1 change: 1 addition & 0 deletions db/migrate/20240925160219_create_evidence_submissions.rb
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@ def change
create_table :evidence_submissions do |t|
t.string :job_id
t.string :job_class
t.string :request_id
t.string :claim_id
t.references :user_account, null: false, foreign_key: true, type: :uuid
t.json :template_metadata_ciphertext
Expand Down
1 change: 1 addition & 0 deletions db/schema.rb

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

11 changes: 11 additions & 0 deletions lib/lighthouse/benefits_documents/constants.rb
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
# frozen_string_literal: true

module BenefitsDocuments
  # Shared constants for the benefits-documents upload pipeline.
  module Constants
    # Lifecycle states persisted on EvidenceSubmission#upload_status.
    # NOTE: PENDING deliberately maps to the string 'IN_PROGRESS', not
    # 'PENDING' — callers look the value up by symbolic key.
    UPLOAD_STATUS = {
      PENDING: 'IN_PROGRESS',
      FAILED: 'FAILED',
      SUCCESS: 'SUCCESS'
    }.freeze
  end
end
52 changes: 19 additions & 33 deletions lib/lighthouse/benefits_documents/service.rb
Original file line number Diff line number Diff line change
Expand Up @@ -22,16 +22,8 @@ def queue_document_upload(params, lighthouse_client_id = nil)
Rails.logger.info('Parameters for document upload', loggable_params)

start_timer = Time.zone.now
claim_id = params[:claimId] || params[:claim_id]
tracked_item_id = params[:trackedItemIds] || params[:tracked_item_ids]

unless claim_id
raise Common::Exceptions::InternalServerError,
ArgumentError.new("Claim with id #{claim_id} not found")
end

jid = submit_document(params[:file], params, lighthouse_client_id)
record_evidence_submission(claim_id, jid, tracked_item_id)
StatsD.measure(STATSD_UPLOAD_LATENCY, Time.zone.now - start_timer, tags: ['is_multifile:false'])
jid
end
Expand All @@ -42,16 +34,9 @@ def queue_multi_image_upload_document(params, lighthouse_client_id = nil)
Rails.logger.info('Parameters for document multi image upload', loggable_params)

start_timer = Time.zone.now
claim_id = params[:claimId] || params[:claim_id]
tracked_item_id = params[:trackedItemIds] || params[:tracked_item_ids]
unless claim_id
raise Common::Exceptions::InternalServerError,
ArgumentError.new("Claim with id #{claim_id} not found")
end

file_to_upload = generate_multi_image_pdf(params[:files])
jid = submit_document(file_to_upload, params, lighthouse_client_id)
record_evidence_submission(claim_id, jid, tracked_item_id)
StatsD.measure(STATSD_UPLOAD_LATENCY, Time.zone.now - start_timer, tags: ['is_multifile:true'])
jid
end
Expand All @@ -64,24 +49,38 @@ def cleanup_after_upload

def submit_document(file, file_params, lighthouse_client_id = nil)
user_icn = @user.icn
user_account_uuid = @user.user_account_uuid
document_data = build_lh_doc(file, file_params)
claim_id = file_params[:claimId] || file_params[:claim_id]
tracked_item_id = file_params[:trackedItemIds] || file_params[:tracked_item_ids]

unless claim_id
raise Common::Exceptions::InternalServerError,
ArgumentError.new('Claim id is required')
end

raise Common::Exceptions::ValidationErrors, document_data unless document_data.valid?

uploader = LighthouseDocumentUploader.new(user_icn, document_data.uploader_ids)
uploader.store!(document_data.file_obj)
# the uploader sanitizes the filename before storing, so set our doc to match
document_data.file_name = uploader.final_filename
if Flipper.enabled?(:cst_synchronous_evidence_uploads, @user)
Lighthouse::DocumentUploadSynchronous.upload(user_icn, document_data.to_serializable_hash)
else
Lighthouse::DocumentUpload.perform_async(user_icn, document_data.to_serializable_hash)
end
document_upload(user_icn, document_data.to_serializable_hash, user_account_uuid,
claim_id, tracked_item_id)
rescue CarrierWave::IntegrityError => e
handle_error(e, lighthouse_client_id, uploader.store_dir)
raise e
end

# Routes the upload: synchronously when the :cst_synchronous_evidence_uploads
# flipper is enabled for the current user, otherwise via the async
# Lighthouse::DocumentUpload Sidekiq job (returning its jid).
def document_upload(user_icn, document_hash, user_account_uuid, claim_id, tracked_item_id)
  unless Flipper.enabled?(:cst_synchronous_evidence_uploads, @user)
    return Lighthouse::DocumentUpload.perform_async(user_icn, document_hash, user_account_uuid,
                                                    claim_id, tracked_item_id)
  end

  Lighthouse::DocumentUploadSynchronous.upload(user_icn, document_hash)
end

def build_lh_doc(file, file_params)
claim_id = file_params[:claimId] || file_params[:claim_id]
tracked_item_ids = file_params[:trackedItemIds] || file_params[:tracked_item_ids]
Expand Down Expand Up @@ -110,19 +109,6 @@ def handle_error(error, lighthouse_client_id, endpoint)
)
end

def record_evidence_submission(claim_id, job_id, tracked_item_id)
user_account = UserAccount.find(@user.user_account_uuid)
job_class = self.class
upload_status = 'pending'
evidence_submission = EvidenceSubmission.new(claim_id:,
tracked_item_id:,
job_id:,
job_class:,
upload_status:)
evidence_submission.user_account = user_account
evidence_submission.save!
end

def generate_multi_image_pdf(image_list)
@base_path = Rails.root.join 'tmp', 'uploads', 'cache', SecureRandom.uuid
img_path = "#{@base_path}/tempFile.jpg"
Expand Down
6 changes: 0 additions & 6 deletions spec/lib/lighthouse/benefits_documents/service_spec.rb
Original file line number Diff line number Diff line change
Expand Up @@ -53,12 +53,6 @@
end.to change(Lighthouse::DocumentUpload.jobs, :size).by(1)
end

it 'records an evidence submission when cst_synchronous_evidence_uploads is false' do
Flipper.disable(:cst_synchronous_evidence_uploads)
service.queue_document_upload(params)
expect(EvidenceSubmission.count).to eq(1)
end

it 'does not enqueue a job when cst_synchronous_evidence_uploads is true' do
VCR.use_cassette('lighthouse/benefits_claims/documents/lighthouse_document_upload_200_pdf') do
Flipper.enable(:cst_synchronous_evidence_uploads)
Expand Down
94 changes: 90 additions & 4 deletions spec/sidekiq/lighthouse/document_upload_spec.rb
Original file line number Diff line number Diff line change
@@ -1,17 +1,62 @@
# frozen_string_literal: true

require 'rails_helper'
require 'sidekiq/testing'
Sidekiq::Testing.fake!

require 'lighthouse/document_upload'
require 'va_notify/service'
require 'lighthouse/benefits_documents/constants'

RSpec.describe Lighthouse::DocumentUpload, type: :job do
subject { described_class }
subject(:job) do
described_class.perform_async(user_icn,
document_data.to_serializable_hash,
user_account_uuid, claim_id,
tracked_item_ids)
end

let(:client_stub) { instance_double(BenefitsDocuments::WorkerService) }
let(:notify_client_stub) { instance_double(VaNotify::Service) }
let(:uploader_stub) { instance_double(LighthouseDocumentUploader) }
let(:user_account) { create(:user_account) }
let(:user_account_uuid) { user_account.id }
let(:filename) { 'doctors-note.pdf' }
let(:file) { Rails.root.join('spec', 'fixtures', 'files', filename).read }
let(:user_icn) { user_account.icn }
let(:tracked_item_ids) { '1234' }
let(:document_type) { 'L029' }
let(:password) { 'Password_123' }
let(:claim_id) { '4567' }
let(:job_class) { 'Lighthouse::DocumentUpload' }
let(:document_data) do
LighthouseDocument.new(
first_name: 'First Name',
participant_id: '1111',
claim_id: claim_id,
# file_obj: file,
uuid: SecureRandom.uuid,
file_extension: 'pdf',
file_name: filename,
tracked_item_id: tracked_item_ids,
document_type:
)
end
let(:response) do
{
data: {
success: true,
requestId: '12345678'
}
}
end
let(:failure_response) do
{
data: {
success: false
}
}
end

let(:issue_instant) { Time.now.to_i }
let(:args) do
Expand All @@ -21,7 +66,7 @@
'failed_at' => issue_instant
}
end
let(:tags) { subject::DD_ZSF_TAGS }
let(:tags) { described_class::DD_ZSF_TAGS }

before do
allow(Rails.logger).to receive(:info)
Expand All @@ -33,6 +78,18 @@
Flipper.enable(:cst_send_evidence_failure_emails)
end

let(:job_id) { job }
let(:evidence_submission_stub) do
evidence_submission = EvidenceSubmission.new(claim_id: '4567',
tracked_item_id: tracked_item_ids,
job_id: job_id,
job_class: described_class,
upload_status: 'pending')
evidence_submission.user_account = user_account
evidence_submission.save!
evidence_submission
end

let(:formatted_submit_date) do
# We want to return all times in EDT
timestamp = Time.at(issue_instant).in_time_zone('America/New_York')
Expand All @@ -44,7 +101,7 @@
it 'enqueues a failure notification mailer to send to the veteran' do
allow(VaNotify::Service).to receive(:new) { notify_client_stub }

subject.within_sidekiq_retries_exhausted_block(args) do
described_class.within_sidekiq_retries_exhausted_block(args) do
expect(notify_client_stub).to receive(:send_email).with(
{
recipient_identifier: { id_value: user_account.icn, id_type: 'ICN' },
Expand All @@ -64,6 +121,35 @@
expect(StatsD).to receive(:increment).with('silent_failure_avoided_no_confirmation', tags:)
end
end

it 'retrieves the file and uploads to Lighthouse' do
allow(LighthouseDocumentUploader).to receive(:new) { uploader_stub }
allow(BenefitsDocuments::WorkerService).to receive(:new) { client_stub }
allow(uploader_stub).to receive(:retrieve_from_store!).with(filename) { file }
allow(uploader_stub).to receive(:read_for_upload) { file }
allow(client_stub).to receive(:upload_document).with(file, document_data)
expect(uploader_stub).to receive(:remove!).once
expect(client_stub).to receive(:upload_document).with(file, document_data).and_return(response)
allow(EvidenceSubmission).to receive(:find_or_create_by)
.with({ claim_id:,
tracked_item_id: tracked_item_ids,
job_id:,
job_class:,
upload_status: BenefitsDocuments::Constants::UPLOAD_STATUS[:PENDING] })
.and_return(evidence_submission_stub)
described_class.drain # runs all queued jobs of this class
# After running DocumentUpload job, there should be a new EvidenceSubmission record
# with the response request_id
expect(EvidenceSubmission.find_by(job_id: job_id).request_id).to eql(response.dig(:data, :requestId))
end

it 'raises an error when Lighthouse returns a failure response' do
allow(client_stub).to receive(:upload_document).with(file, document_data).and_return(failure_response)
expect do
job
described_class.drain
end.to raise_error(StandardError)
end
end

context 'when cst_send_evidence_failure_emails is disabled' do
Expand All @@ -76,7 +162,7 @@
it 'does not enqueue a failure notification mailer to send to the veteran' do
allow(VaNotify::Service).to receive(:new) { notify_client_stub }

subject.within_sidekiq_retries_exhausted_block(args) do
described_class.within_sidekiq_retries_exhausted_block(args) do
expect(notify_client_stub).not_to receive(:send_email)
end
end
Expand Down

0 comments on commit 240954d

Please sign in to comment.