diff --git a/src/main/java/com/conveyal/analysis/components/broker/Broker.java b/src/main/java/com/conveyal/analysis/components/broker/Broker.java index 7abe1e5f4..cdee72bbe 100644 --- a/src/main/java/com/conveyal/analysis/components/broker/Broker.java +++ b/src/main/java/com/conveyal/analysis/components/broker/Broker.java @@ -455,7 +455,7 @@ public void handleRegionalWorkResult(RegionalWorkResult workResult) { // Store all result files permanently. for (var resultFile : resultFiles.entrySet()) { - this.fileStorage.moveIntoStorage(resultFile.getKey(), resultFile.getValue()); + fileStorage.moveIntoStorage(resultFile.getKey(), resultFile.getValue()); } } catch (Throwable t) { recordJobError(job, ExceptionUtils.stackTraceString(t)); diff --git a/src/main/java/com/conveyal/analysis/controllers/RegionalAnalysisController.java b/src/main/java/com/conveyal/analysis/controllers/RegionalAnalysisController.java index f51809d7c..5d405fd58 100644 --- a/src/main/java/com/conveyal/analysis/controllers/RegionalAnalysisController.java +++ b/src/main/java/com/conveyal/analysis/controllers/RegionalAnalysisController.java @@ -11,8 +11,14 @@ import com.conveyal.analysis.models.OpportunityDataset; import com.conveyal.analysis.models.RegionalAnalysis; import com.conveyal.analysis.persistence.Persistence; +import com.conveyal.analysis.results.AccessCsvResultWriter; import com.conveyal.analysis.results.CsvResultType; +import com.conveyal.analysis.results.GridResultWriter; import com.conveyal.analysis.results.MultiOriginAssembler; +import com.conveyal.analysis.results.PathCsvResultWriter; +import com.conveyal.analysis.results.RegionalResultWriter; +import com.conveyal.analysis.results.TemporalDensityCsvResultWriter; +import com.conveyal.analysis.results.TimeCsvResultWriter; import com.conveyal.analysis.util.JsonUtil; import com.conveyal.file.FileStorage; import com.conveyal.file.FileStorageFormat; @@ -47,6 +53,7 @@ import static com.conveyal.analysis.util.JsonUtil.toJson; import static 
com.conveyal.file.FileCategory.BUNDLES; import static com.conveyal.file.FileCategory.RESULTS; +import static com.conveyal.r5.common.Util.notNullOrEmpty; import static com.conveyal.r5.transit.TransportNetworkCache.getScenarioFilename; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkNotNull; @@ -459,7 +466,7 @@ private RegionalAnalysis createRegionalAnalysis (Request req, Response res) thro // In fact, there are three separate classes all containing almost the same info: // AnalysisRequest (from UI to backend), RegionalTask (template sent to worker), RegionalAnalysis (in Mongo). // And for regional analyses, two instances of the worker task: the one with the scenario, and the templateTask. - RegionalAnalysis regionalAnalysis = new RegionalAnalysis(); + final RegionalAnalysis regionalAnalysis = new RegionalAnalysis(); regionalAnalysis.request = task; regionalAnalysis.height = task.height; regionalAnalysis.north = task.north; @@ -508,13 +515,16 @@ private RegionalAnalysis createRegionalAnalysis (Request req, Response res) thro // Persist this newly created RegionalAnalysis to Mongo. // This assigns it creation/update time stamps and an ID, which is needed to name any output CSV files. - regionalAnalysis = Persistence.regionalAnalyses.create(regionalAnalysis); + Persistence.regionalAnalyses.create(regionalAnalysis); // Create the regional job var regionalJob = new Job(task, WorkerTags.fromRegionalAnalysis(regionalAnalysis)); + // Create the result writers + var writers = createResultWriters(regionalAnalysis, task); + // Create the multi-origin assembler with the writers. - var assembler = new MultiOriginAssembler(regionalJob, regionalAnalysis.createResultWriters(task)); + var assembler = new MultiOriginAssembler(regionalJob, writers); // Stored scenario is needed by workers. Must be done ahead of enqueueing the job. 
storeRegionalAnalysisScenarioJson(task); @@ -530,6 +540,57 @@ private RegionalAnalysis createRegionalAnalysis (Request req, Response res) thro return regionalAnalysis; } + /** + * Create results writers for this regional analysis and a task. Stores the result paths that are created by the + * writers. + */ + private static List<RegionalResultWriter> createResultWriters(RegionalAnalysis analysis, RegionalTask task) { + // Create the result writers. Store their result file paths in the database. + var resultWriters = new ArrayList<RegionalResultWriter>(); + if (!task.makeTauiSite) { + if (task.recordAccessibility) { + if (task.originPointSet != null) { + // Freeform origins - create CSV regional analysis results + var accessWriter = new AccessCsvResultWriter(task); + resultWriters.add(accessWriter); + analysis.resultStorage.put(accessWriter.resultType(), accessWriter.getFileName()); + } else { + // Gridded origins - create gridded regional analysis results + resultWriters.addAll(GridResultWriter.createWritersFromTask(analysis.destinationPointSetIds, task)); + } + } + + if (task.recordTimes) { + var timesWriter = new TimeCsvResultWriter(task); + resultWriters.add(timesWriter); + analysis.resultStorage.put(timesWriter.resultType(), timesWriter.getFileName()); + } + + if (task.includePathResults) { + var pathsWriter = new PathCsvResultWriter(task); + resultWriters.add(pathsWriter); + analysis.resultStorage.put(pathsWriter.resultType(), pathsWriter.getFileName()); + } + + if (task.includeTemporalDensity) { + if (task.originPointSet == null) { + // Gridded origins. The full temporal density information is probably too voluminous to be useful. + // We might want to record a grid of dual accessibility values, but this will require some serious + // refactoring of the GridResultWriter. + // if (job.templateTask.dualAccessibilityThreshold > 0) { ...
} + throw new RuntimeException("Temporal density of opportunities cannot be recorded for gridded origin points."); + } else { + var tDensityWriter = new TemporalDensityCsvResultWriter(task); + resultWriters.add(tDensityWriter); + analysis.resultStorage.put(tDensityWriter.resultType(), tDensityWriter.getFileName()); + } + } + + checkArgument(notNullOrEmpty(resultWriters), "A regional analysis should always create at least one grid or CSV file."); + } + return resultWriters; + } + /** * Store the regional analysis scenario as JSON for retrieval by the workers. */ diff --git a/src/main/java/com/conveyal/analysis/models/RegionalAnalysis.java b/src/main/java/com/conveyal/analysis/models/RegionalAnalysis.java index 465d0b240..57fb4c06b 100644 --- a/src/main/java/com/conveyal/analysis/models/RegionalAnalysis.java +++ b/src/main/java/com/conveyal/analysis/models/RegionalAnalysis.java @@ -1,23 +1,12 @@ package com.conveyal.analysis.models; -import com.conveyal.analysis.results.AccessCsvResultWriter; import com.conveyal.analysis.results.CsvResultType; -import com.conveyal.analysis.results.GridResultWriter; -import com.conveyal.analysis.results.PathCsvResultWriter; -import com.conveyal.analysis.results.RegionalResultWriter; -import com.conveyal.analysis.results.TemporalDensityCsvResultWriter; -import com.conveyal.analysis.results.TimeCsvResultWriter; import com.conveyal.r5.analyst.cluster.RegionalTask; import org.locationtech.jts.geom.Geometry; -import java.util.ArrayList; import java.util.HashMap; -import java.util.List; import java.util.Map; -import static com.conveyal.r5.common.Util.notNullOrEmpty; -import static com.google.common.base.Preconditions.checkArgument; - /** * Represents a single regional (multi-origin) accessibility analysis, * which may have more than one percentile and cutoff. 
@@ -112,56 +101,6 @@ public class RegionalAnalysis extends Model implements Cloneable { */ public Map<CsvResultType, String> resultStorage = new HashMap<>(); - /** - * Create results writers for this regional analysis and a task. Stores the result paths that are created by the - * writers. - */ - public List<RegionalResultWriter> createResultWriters(RegionalTask task) { - // Create the result writers. Store their result file paths in the database. - var resultWriters = new ArrayList<RegionalResultWriter>(); - if (!task.makeTauiSite) { - if (task.recordAccessibility) { - if (task.originPointSet != null) { - // Freeform origins - create CSV regional analysis results - var accessWriter = new AccessCsvResultWriter(task); - resultWriters.add(accessWriter); - resultStorage.put(accessWriter.resultType(), accessWriter.getFileName()); - } else { - // Gridded origins - create gridded regional analysis results - resultWriters.addAll(GridResultWriter.createWritersFromTask(this, task)); - } - } - - if (task.recordTimes) { - var timesWriter = new TimeCsvResultWriter(task); - resultWriters.add(timesWriter); - resultStorage.put(timesWriter.resultType(), timesWriter.getFileName()); - } - - if (task.includePathResults) { - var pathsWriter = new PathCsvResultWriter(task); - resultWriters.add(pathsWriter); - resultStorage.put(pathsWriter.resultType(), pathsWriter.getFileName()); - } - - if (task.includeTemporalDensity) { - if (task.originPointSet == null) { - // Gridded origins. The full temporal density information is probably too voluminous to be useful. - // We might want to record a grid of dual accessibility values, but this will require some serious - // refactoring of the GridResultWriter. - // if (job.templateTask.dualAccessibilityThreshold > 0) { ...
} - throw new RuntimeException("Temporal density of opportunities cannot be recorded for gridded origin points."); - } else { - var tDensityWriter = new TemporalDensityCsvResultWriter(task); - resultWriters.add(tDensityWriter); - resultStorage.put(tDensityWriter.resultType(), tDensityWriter.getFileName()); - } - } - checkArgument(notNullOrEmpty(resultWriters), "A regional analysis should always create at least one grid or CSV file."); - } - return resultWriters; - } - public RegionalAnalysis clone () { try { return (RegionalAnalysis) super.clone(); diff --git a/src/main/java/com/conveyal/analysis/results/GridResultWriter.java b/src/main/java/com/conveyal/analysis/results/GridResultWriter.java index 8994441d1..4971caf5e 100644 --- a/src/main/java/com/conveyal/analysis/results/GridResultWriter.java +++ b/src/main/java/com/conveyal/analysis/results/GridResultWriter.java @@ -1,6 +1,5 @@ package com.conveyal.analysis.results; -import com.conveyal.analysis.models.RegionalAnalysis; import com.conveyal.file.FileCategory; import com.conveyal.file.FileStorageKey; import com.conveyal.file.FileUtils; @@ -76,14 +75,14 @@ public class GridResultWriter implements RegionalResultWriter { * We create one GridResultWriter for each destination pointset and percentile. * Each of those output files contains data for all specified travel time cutoffs at each origin. */ - public static List<RegionalResultWriter> createWritersFromTask(RegionalAnalysis regionalAnalysis, RegionalTask task) { + public static List<RegionalResultWriter> createWritersFromTask(String[] destinationPointSetIds, RegionalTask task) { int nPercentiles = task.percentiles.length; - int nDestinationPointSets = task.makeTauiSite ? 0 : task.destinationPointSetKeys.length; + int nDestinationPointSets = destinationPointSetIds.length; // Create one grid writer per percentile and destination pointset.
var gridWriters = new ArrayList<RegionalResultWriter>(); for (int destinationIndex = 0; destinationIndex < nDestinationPointSets; destinationIndex++) { for (int percentileIndex = 0; percentileIndex < nPercentiles; percentileIndex++) { - String destinationPointSetId = regionalAnalysis.destinationPointSetIds[destinationIndex]; + String destinationPointSetId = destinationPointSetIds[destinationIndex]; gridWriters.add(new GridResultWriter( task, percentileIndex, @@ -148,7 +147,9 @@ public static List<RegionalResultWriter> createWritersFromTask(RegionalAnalysis regi } } - /** Gzip the access grid and upload it to file storage (such as AWS S3). */ + /** + * Gzip the access grid and return the files. + */ @Override public synchronized Map.Entry<FileStorageKey, File> finish () throws IOException { randomAccessFile.close();