diff --git a/src/main/java/com/conveyal/analysis/controllers/AggregationAreaController.java b/src/main/java/com/conveyal/analysis/controllers/AggregationAreaController.java
index 286fa5593..fae1637b6 100644
--- a/src/main/java/com/conveyal/analysis/controllers/AggregationAreaController.java
+++ b/src/main/java/com/conveyal/analysis/controllers/AggregationAreaController.java
@@ -10,6 +10,7 @@
 import com.conveyal.analysis.persistence.AnalysisDB;
 import com.conveyal.analysis.util.JsonUtil;
 import com.conveyal.file.FileStorage;
+import com.conveyal.file.UrlWithHumanName;
 import com.conveyal.r5.analyst.progress.Task;
 import com.fasterxml.jackson.databind.node.ObjectNode;
 import org.bson.conversions.Bson;
@@ -27,6 +28,7 @@
 import static com.google.common.base.Preconditions.checkNotNull;
 import static com.mongodb.client.model.Filters.and;
 import static com.mongodb.client.model.Filters.eq;
+import static org.eclipse.jetty.http.MimeTypes.Type.APPLICATION_JSON;
 
 /**
  * Stores vector aggregationAreas (used to define the region of a weighted average accessibility metric).
@@ -98,10 +100,10 @@ private Collection<AggregationArea> getAggregationAreas (Request req, Response r
     }
 
     /** Returns a JSON-wrapped URL for the mask grid of the aggregation area whose id matches the path parameter. */
-    private ObjectNode getAggregationAreaGridUrl (Request req, Response res) {
+    private UrlWithHumanName getAggregationAreaGridUrl (Request req, Response res) {
         AggregationArea aggregationArea = aggregationAreaCollection.findPermittedByRequestParamId(req);
-        String url = fileStorage.getURL(aggregationArea.getStorageKey());
-        return JsonUtil.objectNode().put("url", url);
+        res.type(APPLICATION_JSON.asString());
+        return fileStorage.getJsonUrl(aggregationArea.getStorageKey(), aggregationArea.name, "grid");
     }
 
     @Override
diff --git a/src/main/java/com/conveyal/analysis/controllers/OpportunityDatasetController.java b/src/main/java/com/conveyal/analysis/controllers/OpportunityDatasetController.java
index b1afcfc05..c28363e0a 100644
--- a/src/main/java/com/conveyal/analysis/controllers/OpportunityDatasetController.java
+++ b/src/main/java/com/conveyal/analysis/controllers/OpportunityDatasetController.java
@@ -18,6 +18,7 @@
 import com.conveyal.file.FileStorageFormat;
 import com.conveyal.file.FileStorageKey;
 import com.conveyal.file.FileUtils;
+import com.conveyal.file.UrlWithHumanName;
 import com.conveyal.r5.analyst.FreeFormPointSet;
 import com.conveyal.r5.analyst.Grid;
 import com.conveyal.r5.analyst.PointSet;
@@ -61,6 +62,7 @@
 import static com.conveyal.file.FileCategory.GRIDS;
 import static com.conveyal.r5.analyst.WebMercatorExtents.parseZoom;
 import static com.conveyal.r5.analyst.progress.WorkProductType.OPPORTUNITY_DATASET;
+import static org.eclipse.jetty.http.MimeTypes.Type.APPLICATION_JSON;
 
 /**
  * Controller that handles fetching opportunity datasets (grids and other pointset formats).
@@ -94,10 +96,6 @@ public OpportunityDatasetController (
     /** Store upload status objects FIXME trivial Javadoc */
     private final List<OpportunityDatasetUploadStatus> uploadStatuses = new ArrayList<>();
 
-    private ObjectNode getJsonUrl (FileStorageKey key) {
-        return JsonUtil.objectNode().put("url", fileStorage.getURL(key));
-    }
-
     private void addStatusAndRemoveOldStatuses(OpportunityDatasetUploadStatus status) {
         uploadStatuses.add(status);
         LocalDateTime now = LocalDateTime.now();
@@ -113,10 +111,11 @@ private Collection<OpportunityDataset> getRegionDatasets(Request req, Response r
         );
     }
 
-    private Object getOpportunityDataset(Request req, Response res) {
+    private UrlWithHumanName getOpportunityDataset(Request req, Response res) {
         OpportunityDataset dataset = Persistence.opportunityDatasets.findByIdFromRequestIfPermitted(req);
         if (dataset.format == FileStorageFormat.GRID) {
-            return getJsonUrl(dataset.getStorageKey());
+            res.type(APPLICATION_JSON.asString());
+            return fileStorage.getJsonUrl(dataset.getStorageKey(), dataset.sourceName + "_" + dataset.name, "grid");
         } else {
             // Currently the UI can only visualize grids, not other kinds of datasets (freeform points).
             // We do generate a rasterized grid for each of the freeform pointsets we create, so ideally we'd redirect
@@ -564,9 +563,10 @@ private List<Grid> createGridsFromShapefile(List<FileItem> fileItems,
      * Respond to a request with a redirect to a downloadable file.
      * @param req should specify regionId, opportunityDatasetId, and an available download format (.tiff or .grid)
      */
-    private Object downloadOpportunityDataset (Request req, Response res) throws IOException {
+    private UrlWithHumanName downloadOpportunityDataset (Request req, Response res) throws IOException {
         FileStorageFormat downloadFormat;
         String format = req.params("format");
+        res.type(APPLICATION_JSON.asString());
         try {
             downloadFormat = FileStorageFormat.valueOf(format.toUpperCase());
         } catch (IllegalArgumentException iae) {
@@ -576,38 +576,32 @@ private Object downloadOpportunityDataset (Request req, Response res) throws IOE
             String regionId = req.params("_id");
             String gridKey = format;
             FileStorageKey storageKey = new FileStorageKey(GRIDS, String.format("%s/%s.grid", regionId, gridKey));
-            return getJsonUrl(storageKey);
+            return fileStorage.getJsonUrl(storageKey, gridKey, "grid");
+        }
+        if (FileStorageFormat.GRID.equals(downloadFormat)) {
+            return getOpportunityDataset(req, res);
         }
-
-        if (FileStorageFormat.GRID.equals(downloadFormat)) return getOpportunityDataset(req, res);
-
         final OpportunityDataset opportunityDataset = Persistence.opportunityDatasets.findByIdFromRequestIfPermitted(req);
-
         FileStorageKey gridKey = opportunityDataset.getStorageKey(FileStorageFormat.GRID);
         FileStorageKey formatKey = opportunityDataset.getStorageKey(downloadFormat);
-
         // if this grid is not on S3 in the requested format, try to get the .grid format
         if (!fileStorage.exists(gridKey)) {
             throw AnalysisServerException.notFound("Requested grid does not exist.");
         }
-
         if (!fileStorage.exists(formatKey)) {
             // get the grid and convert it to the requested format
             File gridFile = fileStorage.getFile(gridKey);
             Grid grid = Grid.read(new GZIPInputStream(new FileInputStream(gridFile))); // closes input stream
             File localFile = FileUtils.createScratchFile(downloadFormat.toString());
             FileOutputStream fos = new FileOutputStream(localFile);
-
             if (FileStorageFormat.PNG.equals(downloadFormat)) {
                 grid.writePng(fos);
             } else if (FileStorageFormat.GEOTIFF.equals(downloadFormat)) {
                 grid.writeGeotiff(fos);
             }
-
             fileStorage.moveIntoStorage(formatKey, localFile);
         }
-
-        return getJsonUrl(formatKey);
+        return fileStorage.getJsonUrl(formatKey, opportunityDataset.sourceName + "_" + opportunityDataset.name, downloadFormat.extension);
     }
 
     /**
diff --git a/src/main/java/com/conveyal/analysis/controllers/RegionalAnalysisController.java b/src/main/java/com/conveyal/analysis/controllers/RegionalAnalysisController.java
index 7f6189322..2d169187f 100644
--- a/src/main/java/com/conveyal/analysis/controllers/RegionalAnalysisController.java
+++ b/src/main/java/com/conveyal/analysis/controllers/RegionalAnalysisController.java
@@ -8,6 +8,7 @@
 import com.conveyal.analysis.models.AnalysisRequest;
 import com.conveyal.analysis.models.OpportunityDataset;
 import com.conveyal.analysis.models.RegionalAnalysis;
+import com.conveyal.file.UrlWithHumanName;
 import com.conveyal.analysis.persistence.Persistence;
 import com.conveyal.analysis.results.CsvResultType;
 import com.conveyal.analysis.util.JsonUtil;
@@ -20,8 +21,6 @@
 import com.conveyal.r5.analyst.PointSet;
 import com.conveyal.r5.analyst.PointSetCache;
 import com.conveyal.r5.analyst.cluster.RegionalTask;
-import com.fasterxml.jackson.databind.JsonNode;
-import com.google.common.net.HttpHeaders;
 import com.google.common.primitives.Ints;
 import com.mongodb.QueryBuilder;
 import gnu.trove.list.array.TIntArrayList;
@@ -46,20 +45,21 @@
 import java.util.Arrays;
 import java.util.Collection;
 import java.util.HashMap;
-import java.util.HashSet;
 import java.util.List;
 import java.util.Locale;
 import java.util.Map;
-import java.util.Set;
 import java.util.zip.GZIPOutputStream;
 
 import static com.conveyal.analysis.util.JsonUtil.toJson;
 import static com.conveyal.file.FileCategory.BUNDLES;
 import static com.conveyal.file.FileCategory.RESULTS;
+import static com.conveyal.file.UrlWithHumanName.filenameCleanString;
 import static com.conveyal.r5.transit.TransportNetworkCache.getScenarioFilename;
 import static com.google.common.base.Preconditions.checkArgument;
 import static com.google.common.base.Preconditions.checkNotNull;
 import static com.google.common.base.Preconditions.checkState;
+import static org.eclipse.jetty.http.MimeTypes.Type.APPLICATION_JSON;
+import static org.eclipse.jetty.http.MimeTypes.Type.TEXT_HTML;
 
 /**
  * Spark HTTP handler methods that allow launching new regional analyses, as well as deleting them and fetching
@@ -328,28 +328,15 @@ private Object getAllRegionalResults (Request req, Response res) throws IOExcept
             }
             fileStorage.moveIntoStorage(zippedResultsKey, tempZipFile);
         }
-        String humanReadableZipName = friendlyAnalysisName + ".zip";
-        res.header(HttpHeaders.CONTENT_DISPOSITION, "attachment; filename=\"%s\"".formatted(humanReadableZipName));
-        return JsonUtil.toJsonString(JsonUtil.objectNode()
-                .put("url", fileStorage.getURL(zippedResultsKey))
-                .put("name", humanReadableZipName)
-        );
-    }
-
-    private String filenameCleanString (String original) {
-        String ret = original.replaceAll("\\W+", "_");
-        if (ret.length() > 20) {
-            ret = ret.substring(0, 20);
-        }
-        return ret;
+//        res.header(HttpHeaders.CONTENT_DISPOSITION, "attachment; filename=\"%s\"".formatted(humanReadableZipName));
+        res.type(APPLICATION_JSON.asString());
+        return fileStorage.getJsonUrl(zippedResultsKey, analysis.name, "zip");
     }
 
     /**
     * This used to extract a particular percentile of a regional analysis as a grid file.
-     * Now it just gets the single percentile that exists for any one analysis, either from the local buffer file
-     * for an analysis still in progress, or from S3 for a completed analysis.
      */
-    private Object getRegionalResults (Request req, Response res) throws IOException {
+    private UrlWithHumanName getRegionalResults (Request req, Response res) throws IOException {
         // Get some path parameters out of the URL.
         // The UUID of the regional analysis for which we want the output data
@@ -431,12 +418,20 @@ private Object getRegionalResults (Request req, Response res) throws IOException
         FileStorageKey singleCutoffFileStorageKey = getSingleCutoffGrid(
                 analysis, cutoffIndex, destinationPointSetId, percentile, format
         );
-        return JsonUtil.toJsonString(
-                JsonUtil.objectNode().put("url", fileStorage.getURL(singleCutoffFileStorageKey))
-        );
+        res.type(APPLICATION_JSON.asString());
+        // Substitute human readable name and shorten destination UUID.
+        // TODO better system for resolving UUID to human readable names in single and multi-grid cases
+        // Or maybe only allow multi-grid download for saving by end user, and single grid is purely internal.
+        int firstUnderscore = singleCutoffFileStorageKey.path.indexOf('_');
+        int secondUnderscore = singleCutoffFileStorageKey.path.indexOf('_', firstUnderscore + 1);
+        int lastDot = singleCutoffFileStorageKey.path.lastIndexOf('.');
+        String humanName = analysis.name +
+                singleCutoffFileStorageKey.path.substring(firstUnderscore, firstUnderscore + 6) +
+                singleCutoffFileStorageKey.path.substring(secondUnderscore, lastDot);
+        return fileStorage.getJsonUrl(singleCutoffFileStorageKey, humanName, format.extension);
     }
 
-    private String getCsvResults (Request req, Response res) {
+    private Object getCsvResults (Request req, Response res) {
         final String regionalAnalysisId = req.params("_id");
         final CsvResultType resultType = CsvResultType.valueOf(req.params("resultType").toUpperCase());
         // If the resultType parameter received on the API is unrecognized, valueOf throws IllegalArgumentException
@@ -458,7 +453,10 @@ private String getCsvResults (Request req, Response res) {
 
         FileStorageKey fileStorageKey = new FileStorageKey(RESULTS, storageKey);
 
-        res.type("text/plain");
+        // TODO handle JSON with human name on UI side
+        // res.type(APPLICATION_JSON.asString());
+        // return fileStorage.getJsonUrl(fileStorageKey, analysis.name, resultType + ".csv");
+        res.type(TEXT_HTML.asString());
         return fileStorage.getURL(fileStorageKey);
     }
 
@@ -652,7 +650,7 @@ private RegionalAnalysis updateRegionalAnalysis (Request request, Response respo
      * Return a JSON-wrapped URL for the file in FileStorage containing the JSON representation of the scenario for
      * the given regional analysis.
      */
-    private JsonNode getScenarioJsonUrl (Request request, Response response) {
+    private UrlWithHumanName getScenarioJsonUrl (Request request, Response response) {
         RegionalAnalysis regionalAnalysis = Persistence.regionalAnalyses.findByIdIfPermitted(
                 request.params("_id"),
                 DBProjection.exclude("request.scenario.modifications"),
@@ -663,9 +661,9 @@ private JsonNode getScenarioJsonUrl (Request request, Response response) {
         final String scenarioId = regionalAnalysis.request.scenarioId;
         checkNotNull(networkId, "RegionalAnalysis did not contain a network ID.");
         checkNotNull(scenarioId, "RegionalAnalysis did not contain an embedded request with scenario ID.");
-        String scenarioUrl = fileStorage.getURL(
-                new FileStorageKey(BUNDLES, getScenarioFilename(regionalAnalysis.bundleId, scenarioId)));
-        return JsonUtil.objectNode().put("url", scenarioUrl);
+        FileStorageKey scenarioKey = new FileStorageKey(BUNDLES, getScenarioFilename(regionalAnalysis.bundleId, scenarioId));
+        response.type(APPLICATION_JSON.asString());
+        return fileStorage.getJsonUrl(scenarioKey, regionalAnalysis.name, "scenario.json");
     }
 
     @Override
@@ -676,11 +674,12 @@ public void registerEndpoints (spark.Service sparkService) {
         });
         sparkService.path("/api/regional", () -> {
             // For grids, no transformer is supplied: render raw bytes or input stream rather than transforming to JSON.
+            // Wait, do we have any endpoints that write grids into responses? It looks like they all return URLs now.
             sparkService.get("/:_id", this::getRegionalAnalysis);
-            sparkService.get("/:_id/all", this::getAllRegionalResults);
-            sparkService.get("/:_id/grid/:format", this::getRegionalResults);
-            sparkService.get("/:_id/csv/:resultType", this::getCsvResults);
-            sparkService.get("/:_id/scenarioJsonUrl", this::getScenarioJsonUrl);
+            sparkService.get("/:_id/all", this::getAllRegionalResults, toJson);
+            sparkService.get("/:_id/grid/:format", this::getRegionalResults, toJson);
+            sparkService.get("/:_id/csv/:resultType", this::getCsvResults, toJson);
+            sparkService.get("/:_id/scenarioJsonUrl", this::getScenarioJsonUrl, toJson);
             sparkService.delete("/:_id", this::deleteRegionalAnalysis, toJson);
             sparkService.post("", this::createRegionalAnalysis, toJson);
             sparkService.put("/:_id", this::updateRegionalAnalysis, toJson);
diff --git a/src/main/java/com/conveyal/file/FileStorage.java b/src/main/java/com/conveyal/file/FileStorage.java
index 52de21316..8c5ef0122 100644
--- a/src/main/java/com/conveyal/file/FileStorage.java
+++ b/src/main/java/com/conveyal/file/FileStorage.java
@@ -94,4 +94,9 @@ default InputStream getInputStream (FileCategory fileCategory, String fileName)
         }
     }
 
+    default UrlWithHumanName getJsonUrl (FileStorageKey key, String rawHumanName, String humanExtension) {
+        String url = this.getURL(key);
+        return UrlWithHumanName.fromCleanedName(url, rawHumanName, humanExtension);
+    }
+
 }
diff --git a/src/main/java/com/conveyal/file/UrlWithHumanName.java b/src/main/java/com/conveyal/file/UrlWithHumanName.java
new file mode 100644
index 000000000..c80815b93
--- /dev/null
+++ b/src/main/java/com/conveyal/file/UrlWithHumanName.java
@@ -0,0 +1,39 @@
+package com.conveyal.file;
+
+/**
+ * Combines a url for downloading a file, which might include a globally unique but human-annoying UUID, with a
+ * suggested human-readable name for that file when saved by an end user. The humanName may not be globally unique,
+ * so is only appropriate for cases where it doesn't need to be machine discoverable using a UUID. The humanName can
+ * be used as the download attribute of an HTML link, or as the attachment name in a content-disposition header.
+ * Instances of this record are intended to be serialized to JSON as an HTTP API response.
+ * // TODO make this into a class with factory methods and move static cleanFilename method here.
+ */
+public class UrlWithHumanName {
+    public final String url;
+    public final String humanName;
+
+    public UrlWithHumanName (String url, String humanName) {
+        this.url = url;
+        this.humanName = humanName;
+    }
+
+    private static final int TRUNCATE_FILENAME_CHARS = 220;
+
+    /**
+     * Given an arbitrary string, make it safe for use as a friendly human-readable filename.
+     * This can yield non-unique strings and is intended for files downloaded by end users that do not need to be
+     * machine-discoverable by unique IDs.
+     */
+    public static String filenameCleanString (String original) {
+        String ret = original.replaceAll("\\W+", "_");
+        if (ret.length() > TRUNCATE_FILENAME_CHARS) {
+            ret = ret.substring(0, TRUNCATE_FILENAME_CHARS);
+        }
+        return ret;
+    }
+
+    public static UrlWithHumanName fromCleanedName (String url, String rawHumanName, String humanExtension) {
+        String humanName = UrlWithHumanName.filenameCleanString(rawHumanName) + "." + humanExtension;
+        return new UrlWithHumanName(url, humanName);
+    }
+}
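
Not part of the patch: a minimal sketch of how the new `UrlWithHumanName` pieces behave, which may help when reviewing the controller changes above. The sample URL and dataset name are made up, and the JSON line assumes a default-configured Jackson `ObjectMapper` roughly comparable to the project's `toJson` transformer.

```java
import com.conveyal.file.UrlWithHumanName;
import com.fasterxml.jackson.databind.ObjectMapper;

public class UrlWithHumanNameDemo {
    public static void main (String[] args) throws Exception {
        // Hypothetical storage URL; in the controllers it comes from FileStorage.getURL(key).
        String url = "https://storage.example.com/grids/3f2a9c1e.grid";
        UrlWithHumanName wrapped = UrlWithHumanName.fromCleanedName(url, "Central District: Jobs (2020)", "grid");
        // filenameCleanString collapses runs of non-word characters to underscores,
        // so this prints Central_District_Jobs_2020_.grid
        System.out.println(wrapped.humanName);
        // Serialized with default Jackson settings (public fields are picked up), the response body
        // would look like {"url":"https://...","humanName":"Central_District_Jobs_2020_.grid"}
        System.out.println(new ObjectMapper().writeValueAsString(wrapped));
    }
}
```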
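Also not part of the patch: a worked example of the substring arithmetic in `getRegionalResults`, which keeps the analysis name, an underscore plus the first five characters of the destination pointset ID, and the remaining parameter markers up to the extension. The path layout here is hypothetical; the real one comes from `getSingleCutoffGrid`, which this diff does not show.

```java
public class SingleCutoffHumanNameExample {
    public static void main (String[] args) {
        // Hypothetical storage key path: analysisId_destinationPointSetId_remainingParameters.format
        String path = "a1b2c3_d4e5f6a7b8c9_P50_C45.geotiff";
        String analysisName = "Morning_peak_transit";
        int firstUnderscore = path.indexOf('_');
        int secondUnderscore = path.indexOf('_', firstUnderscore + 1);
        int lastDot = path.lastIndexOf('.');
        // Same logic as the patch: underscore + 5 characters of the destination ID,
        // then everything from the second underscore up to the extension.
        String humanName = analysisName
                + path.substring(firstUnderscore, firstUnderscore + 6)
                + path.substring(secondUnderscore, lastDot);
        // Prints Morning_peak_transit_d4e5f_P50_C45
        System.out.println(humanName);
    }
}
```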