diff --git a/src/main/java/com/conveyal/datatools/manager/models/transform/AppendToFileTransformation.java b/src/main/java/com/conveyal/datatools/manager/models/transform/AppendToFileTransformation.java
new file mode 100644
index 000000000..d290afd66
--- /dev/null
+++ b/src/main/java/com/conveyal/datatools/manager/models/transform/AppendToFileTransformation.java
@@ -0,0 +1,81 @@
+
+package com.conveyal.datatools.manager.models.transform;
+
+import com.conveyal.datatools.common.status.MonitorableJob;
+import com.conveyal.datatools.manager.models.TableTransformResult;
+import com.conveyal.datatools.manager.models.TransformType;
+
+import java.io.ByteArrayInputStream;
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.nio.charset.StandardCharsets;
+import java.nio.file.FileSystem;
+import java.nio.file.FileSystems;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.nio.file.StandardCopyOption;
+
+public class AppendToFileTransformation extends ZipTransformation {
+
+    public static AppendToFileTransformation create(String csvData, String table) {
+        AppendToFileTransformation transformation = new AppendToFileTransformation();
+        transformation.csvData = csvData;
+        transformation.table = table;
+        return transformation;
+    }
+
+    @Override
+    public void validateParameters(MonitorableJob.Status status) {
+        if (csvData == null) {
+            status.fail("CSV data must not be null");
+        }
+    }
+
+    @Override
+    public void transform(FeedTransformZipTarget zipTarget, MonitorableJob.Status status) {
+        String tableName = table + ".txt";
+        Path targetZipPath = Paths.get(zipTarget.gtfsFile.getAbsolutePath());
+
+        try (
+            FileSystem targetZipFs = FileSystems.newFileSystem(targetZipPath, (ClassLoader) null);
+            InputStream newLineStream = new ByteArrayInputStream("\n".getBytes(StandardCharsets.UTF_8));
+            InputStream inputStream = new ByteArrayInputStream(csvData.getBytes(StandardCharsets.UTF_8));
+        ) {
+            TransformType type = TransformType.TABLE_MODIFIED;
+
+            Path targetTxtFilePath = getTablePathInZip(tableName, targetZipFs);
+
+            final File tempFile = File.createTempFile(tableName + "-temp", ".txt");
+            Files.copy(targetTxtFilePath, tempFile.toPath(), StandardCopyOption.REPLACE_EXISTING);
+
+            // Append the CSV data to the temporary copy of the target file.
+            try (OutputStream os = new FileOutputStream(tempFile, true)) {
+                // Write a newline first in case the existing file doesn't end with one.
+                // Having an extra newline is not a problem!
+                os.write(newLineStream.readAllBytes());
+                os.write(inputStream.readAllBytes());
+            } catch (Exception e) {
+                status.fail("Failed to write to target file", e);
+            }
+
+            // Copy modified file into zip
+            Files.copy(tempFile.toPath(), targetTxtFilePath, StandardCopyOption.REPLACE_EXISTING);
+
+            final int NEW_LINE_CHARACTER_CODE = 10;
+            int lineCount = (int) csvData.chars().filter(c -> c == NEW_LINE_CHARACTER_CODE).count();
+            zipTarget.feedTransformResult.tableTransformResults.add(new TableTransformResult(
+                tableName,
+                type,
+                0,
+                0,
+                lineCount + 1,
+                0
+            ));
+        } catch (Exception e) {
+            status.fail("Unknown error encountered while transforming zip file", e);
+        }
+    }
+}
diff --git a/src/main/java/com/conveyal/datatools/manager/models/transform/FeedTransformation.java b/src/main/java/com/conveyal/datatools/manager/models/transform/FeedTransformation.java
index b7a275cf1..cce2bebc3 100644
--- a/src/main/java/com/conveyal/datatools/manager/models/transform/FeedTransformation.java
+++ b/src/main/java/com/conveyal/datatools/manager/models/transform/FeedTransformation.java
@@ -33,7 +33,8 @@
     @JsonSubTypes.Type(value = ReplaceFileFromVersionTransformation.class, name = "ReplaceFileFromVersionTransformation"),
     @JsonSubTypes.Type(value = ReplaceFileFromStringTransformation.class, name = "ReplaceFileFromStringTransformation"),
     @JsonSubTypes.Type(value = PreserveCustomFieldsTransformation.class, name = "PreserveCustomFieldsTransformation"),
-    @JsonSubTypes.Type(value = AddCustomFileFromStringTransformation.class, name = "AddCustomFileTransformation")
+    @JsonSubTypes.Type(value = AddCustomFileFromStringTransformation.class, name = "AddCustomFileTransformation"),
+    @JsonSubTypes.Type(value = AppendToFileTransformation.class, name = "AppendToFileTransformation")
 })
 public abstract class FeedTransformation implements Serializable {
     private static final long serialVersionUID = 1L;
diff --git a/src/test/java/com/conveyal/datatools/manager/jobs/ArbitraryTransformJobTest.java b/src/test/java/com/conveyal/datatools/manager/jobs/ArbitraryTransformJobTest.java
index 9303bf821..eb25b619c 100644
--- a/src/test/java/com/conveyal/datatools/manager/jobs/ArbitraryTransformJobTest.java
+++ b/src/test/java/com/conveyal/datatools/manager/jobs/ArbitraryTransformJobTest.java
@@ -4,13 +4,13 @@
 import com.conveyal.datatools.UnitTest;
 import com.conveyal.datatools.manager.auth.Auth0Connection;
 import com.conveyal.datatools.manager.auth.Auth0UserProfile;
-import com.conveyal.datatools.manager.models.FeedRetrievalMethod;
 import com.conveyal.datatools.manager.models.FeedSource;
 import com.conveyal.datatools.manager.models.FeedVersion;
 import com.conveyal.datatools.manager.models.Project;
 import com.conveyal.datatools.manager.models.Snapshot;
 import com.conveyal.datatools.manager.models.TableTransformResult;
 import com.conveyal.datatools.manager.models.transform.AddCustomFileFromStringTransformation;
+import com.conveyal.datatools.manager.models.transform.AppendToFileTransformation;
 import com.conveyal.datatools.manager.models.transform.DeleteRecordsTransformation;
 import com.conveyal.datatools.manager.models.transform.FeedTransformRules;
 import com.conveyal.datatools.manager.models.transform.FeedTransformation;
@@ -26,17 +26,11 @@
 import org.junit.jupiter.api.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import org.supercsv.io.CsvMapReader;
-import org.supercsv.prefs.CsvPreference;
 
-import java.io.File;
-import java.io.InputStream;
 import java.io.IOException;
-import java.io.InputStreamReader;
 import java.sql.SQLException;
 import java.util.ArrayList;
 import java.util.List;
-import java.util.Map;
 import java.util.UUID;
 import java.util.zip.ZipEntry;
 import java.util.zip.ZipFile;
@@ -46,7 +40,6 @@
 import static com.conveyal.datatools.TestUtils.createFeedVersion;
 import static com.conveyal.datatools.TestUtils.zipFolderFiles;
 import static com.conveyal.datatools.manager.models.FeedRetrievalMethod.MANUALLY_UPLOADED;
-import static com.conveyal.datatools.manager.models.FeedRetrievalMethod.VERSION_CLONE;
 import static org.hamcrest.MatcherAssert.assertThat;
 import static org.junit.jupiter.api.Assertions.assertEquals;
 import static org.junit.jupiter.api.Assertions.assertNotNull;
@@ -192,6 +185,37 @@ void replaceGtfsPlusFileFailsIfSourceIsMissing() throws IOException {
         assertThat(targetVersion.validationResult, Matchers.nullValue());
     }
 
+    @Test
+    void canAppendToStops() throws SQLException, IOException {
+        sourceVersion = createFeedVersion(
+            feedSource,
+            zipFolderFiles("fake-agency-with-only-calendar")
+        );
+        FeedTransformation transformation = AppendToFileTransformation.create(generateStopRow(), "stops");
+        FeedTransformRules transformRules = new FeedTransformRules(transformation);
+        feedSource.transformRules.add(transformRules);
+        Persistence.feedSources.replace(feedSource.id, feedSource);
+        // Create the target version that the append transformation will be applied to.
+        targetVersion = createFeedVersion(
+            feedSource,
+            zipFolderFiles("fake-agency-with-only-calendar-dates")
+        );
+        LOG.info("Checking assertions.");
+        assertEquals(
+            5 + 3, // 5 rows in the fixture's stops.txt plus the 3 appended rows.
+            targetVersion.feedLoadResult.stops.rowCount,
+            "stops.txt row count should equal the source row count plus the 3 appended rows"
+        );
+        // Check for presence of new stop id in database (one record).
+        assertThatSqlCountQueryYieldsExpectedCount(
+            String.format(
+                "SELECT count(*) FROM %s.stops WHERE stop_id = '%s'",
+                targetVersion.namespace,
+                "new"
+            ),
+            1
+        );
+    }
     @Test
     void canReplaceFeedInfo() throws SQLException, IOException {
         // Generate random UUID for feedId, which gets placed into the csv data.
@@ -282,6 +306,12 @@ private static String generateStopsWithCustomFields() {
             + "\n1234567,customValue3,customValue4";
     }
 
+    private static String generateStopRow() {
+        return "new3,new3,appended stop,,37,-122,,,0,123,,"
+            + "\nnew2,new2,appended stop,,37,-122,,,0,123,,"
+            + "\nnew,new,appended stop,,37.06668,-122.07781,,,0,123,,";
+    }
+
     private static String generateCustomCsvData() {
         return "custom_column1,custom_column2,custom_column3"
            + "\ncustomValue1,customValue2,customValue3"
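Reviewer note (not part of the patch): the `5 + 3` expectation in `canAppendToStops` follows from how the transformation reports added rows. It counts newline characters in the supplied CSV block and reports `lineCount + 1` rows added, so the three-row string from `generateStopRow()` (two embedded newlines) is reported as 3 rows on top of the fixture's 5 existing `stops.txt` rows. The snippet below is a standalone illustrative sketch of that arithmetic; `AppendRowCountSketch` and `reportedAddedRows` are hypothetical names and do not appear in this PR.

```java
// Illustrative sketch only; mirrors the row accounting in AppendToFileTransformation.
public class AppendRowCountSketch {
    private static final int NEW_LINE_CHARACTER_CODE = 10; // '\n'

    // Rows reported as added: one per newline, plus one for the final row
    // that carries no trailing newline.
    static int reportedAddedRows(String csvData) {
        int lineCount = (int) csvData.chars().filter(c -> c == NEW_LINE_CHARACTER_CODE).count();
        return lineCount + 1;
    }

    public static void main(String[] args) {
        String csvData = "new3,new3,appended stop,,37,-122,,,0,123,,"
            + "\nnew2,new2,appended stop,,37,-122,,,0,123,,"
            + "\nnew,new,appended stop,,37.06668,-122.07781,,,0,123,,";
        // Prints 3: the count added to the fixture's 5 existing stops.txt rows in the test assertion.
        System.out.println(reportedAddedRows(csvData));
    }
}
```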