Skip to content

Commit

Permalink
added Daily Indexing
Browse files Browse the repository at this point in the history
  • Loading branch information
niquerio committed Apr 28, 2022
1 parent 22cb502 commit da3386c
Show file tree
Hide file tree
Showing 4 changed files with 72 additions and 1 deletion.
4 changes: 3 additions & 1 deletion .env-dev-values
Original file line number Diff line number Diff line change
Expand Up @@ -3,8 +3,10 @@ REDIS_URL="redis://redis:6379"
DAILY_ALMA_FILES_PATH="search_daily_bibs"
FULL_ALMA_FILES_PATH="search_full_bibs"
FULL_CATALOG_REINDEX_ALMA_JOB_NAME="Publishing Platform Job Search publish"
DAILY_CATALOG_REINDEX_ALMA_JOB_NAME="Publishing Platform Job Search publish daily"
DAILY_CATALOG_INDEX_ALMA_JOB_NAME="Publishing Platform Job Search publish daily"
REINDEX_SOLR_URL="http://reindex-solr/solr/biblio"
HATCHER_PRODUCTION_SOLR_URL="http://hatcher-solr/solr/biblio"
MACC_PRODUCTION_SOLR_URL="http://macc-solr/solr/biblio"
ALMA_FILES_USER="alma"
ALMA_FILES_HOST="sftp"
SSH_KEY_PATH="/etc/secret-volume/id_rsa"
Expand Down
22 changes: 22 additions & 0 deletions lib/indexing_jobs_generator.rb
Original file line number Diff line number Diff line change
Expand Up @@ -48,3 +48,25 @@ def alma_output_directory
ENV.fetch("FULL_ALMA_FILES_PATH")
end
end

# Daily variant of IndexingJobsGenerator. Matches messages for the daily
# Alma publish job and pushes each action's files to both production Solr
# instances (Hatcher, then Macc) instead of the reindex Solr.
class DailyIndexingJobsGenerator < IndexingJobsGenerator
  # Alma job name that identifies a daily publish message; used by the
  # inherited .match? hook (defined on the parent class — not visible here).
  def self.alma_job_name
    ENV.fetch("DAILY_CATALOG_INDEX_ALMA_JOB_NAME")
  end

  # First logs a one-line summary for every action, then enqueues the
  # indexing jobs for each action against both production Solr URLs.
  # The two passes are deliberate: all summaries are logged before any
  # job is pushed.
  def run
    actions.each { |action| @logger.info action.summary }

    actions.each do |action|
      # Order matters: Hatcher first, then Macc — same as the original calls.
      %w[HATCHER_PRODUCTION_SOLR_URL MACC_PRODUCTION_SOLR_URL].each do |env_key|
        @push_indexing_jobs.call(job_name: action.job_name, files: action.files, solr_url: ENV.fetch(env_key))
      end
    end
  end

  private

  # Directory on the Alma SFTP host holding the daily bib files; consumed by
  # the parent class when listing files (parent not visible here).
  def alma_output_directory
    ENV.fetch("DAILY_ALMA_FILES_PATH")
  end
end
3 changes: 3 additions & 0 deletions lib/message_router.rb
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,9 @@ def self.route(body, logger)
if ReindexJobsGenerator.match?(parsed_body) && ENV.fetch("REINDEX_ON") == "true"
logger.info("Matched Reindex Jobs Generator")
ReindexJobsGenerator.new(data: parsed_body).run
elsif DailyIndexingJobsGenerator.match?(parsed_body) && ENV.fetch("DAILY_INDEX_ON") == "true"
logger.info("Daily Indexing Jobs Generator")
DailyIndexingJobsGenerator.new(data: parsed_body).run
else
logger.info("Did not match anything")
end
Expand Down
44 changes: 44 additions & 0 deletions spec/lib/indexing_jobs_generator_spec.rb
Original file line number Diff line number Diff line change
Expand Up @@ -69,3 +69,47 @@
end
end
end
# Specs for DailyIndexingJobsGenerator: job-name matching plus #run's two
# responsibilities — logging action summaries and pushing indexing jobs to
# both production Solr URLs.
describe DailyIndexingJobsGenerator do
  before(:each) do
    # Fixture Alma publishing-job payload, renamed to the daily publish job
    # so .match? recognizes it.
    @data = JSON.parse(fixture("publishing_job_response.json"))
    @data["job_instance"]["name"] = ENV.fetch("DAILY_CATALOG_INDEX_ALMA_JOB_NAME")
    @job_id_from_data = @data["id"]
    # Two files tagged with the job id (one "new", one "delete") plus one
    # unrelated file that the generator should ignore.
    @files = [
      "file_#{@job_id_from_data}_new.tar.gz",
      "file_#{@job_id_from_data}_delete.tar.gz",
      "file_new.tar.gz"
    ]
    @sftp_double = instance_double(SFTP, ls: @files)
    @logger_double = instance_double(Logger, info: nil)
    # Stand-in for the Sidekiq push; the lambda below adapts the generator's
    # keyword-argument callback onto this double so calls can be asserted.
    @push_bulk_double = double("SidekiqClient", push_bulk: nil)
    @push_indexing_jobs = lambda do |job_name:, files:, solr_url:|
      @push_bulk_double.push_bulk(job_name, files, solr_url)
    end
  end
  context ".match?" do
    it "matches the correct job" do
      expect(described_class.match?(@data)).to eq(true)
    end
    it "does not match when it should not match" do
      @data["job_instance"]["name"] = "Not the correct job name"
      expect(described_class.match?(@data)).to eq(false)
    end
  end
  subject do
    described_class.new(data: @data, sftp: @sftp_double, logger: @logger_double, push_indexing_jobs: @push_indexing_jobs)
  end
  context "run" do
    # One job-id-tagged file each for IndexIt and DeleteIt; the untagged
    # third file is presumably excluded by the parent class — TODO confirm.
    it "logs actions summary" do
      expect(@logger_double).to receive(:info).with("1 file(s) for IndexIt job")
      expect(@logger_double).to receive(:info).with("1 file(s) for DeleteIt job")
      subject.run
    end
    # Each action must be pushed to BOTH production Solr URLs.
    it "sends the correct arguments to push_indexing_jobs" do
      expect(@push_bulk_double).to receive(:push_bulk).with("IndexIt", [@files[0]], ENV.fetch("MACC_PRODUCTION_SOLR_URL"))
      expect(@push_bulk_double).to receive(:push_bulk).with("IndexIt", [@files[0]], ENV.fetch("HATCHER_PRODUCTION_SOLR_URL"))
      expect(@push_bulk_double).to receive(:push_bulk).with("DeleteIt", [@files[1]], ENV.fetch("MACC_PRODUCTION_SOLR_URL"))
      expect(@push_bulk_double).to receive(:push_bulk).with("DeleteIt", [@files[1]], ENV.fetch("HATCHER_PRODUCTION_SOLR_URL"))
      subject.run
    end
  end
end

0 comments on commit da3386c

Please sign in to comment.