From 76d89e903abccae17462e199a595bf23222b158d Mon Sep 17 00:00:00 2001 From: Landon Reed Date: Thu, 11 Oct 2018 17:08:08 -0400 Subject: [PATCH 1/9] fix(validation): handle zero hops separately if all travel times rounded refs #110 --- .../conveyal/gtfs/error/NewGTFSErrorType.java | 1 + .../conveyal/gtfs/error/SQLErrorStorage.java | 7 ++++ .../gtfs/validator/SpeedTripValidator.java | 41 ++++++++++++++++--- .../gtfs/validator/TripValidator.java | 2 - .../conveyal/gtfs/validator/Validator.java | 20 +++++++-- 5 files changed, 61 insertions(+), 10 deletions(-) diff --git a/src/main/java/com/conveyal/gtfs/error/NewGTFSErrorType.java b/src/main/java/com/conveyal/gtfs/error/NewGTFSErrorType.java index 8fa452d46..02fc5075d 100644 --- a/src/main/java/com/conveyal/gtfs/error/NewGTFSErrorType.java +++ b/src/main/java/com/conveyal/gtfs/error/NewGTFSErrorType.java @@ -36,6 +36,7 @@ public enum NewGTFSErrorType { TABLE_TOO_LONG(Priority.MEDIUM, "Table is too long to record line numbers with a 32-bit integer, overflow will occur."), TIME_ZONE_FORMAT(Priority.MEDIUM, "Time zone format should be X."), REQUIRED_TABLE_EMPTY(Priority.MEDIUM, "This table is required by the GTFS specification but is empty."), + FEED_TRAVEL_TIMES_ROUNDED(Priority.LOW, "All travel times in the feed are rounded to the minute, which may cause unexpected results in routing applications where travel times are zero."), ROUTE_DESCRIPTION_SAME_AS_NAME(Priority.LOW, "The description of a route is identical to its name, so does not add any information."), ROUTE_LONG_NAME_CONTAINS_SHORT_NAME(Priority.LOW, "The long name of a route should complement the short name, not include it."), ROUTE_SHORT_AND_LONG_NAME_MISSING(Priority.MEDIUM, "A route has neither a long nor a short name."), diff --git a/src/main/java/com/conveyal/gtfs/error/SQLErrorStorage.java b/src/main/java/com/conveyal/gtfs/error/SQLErrorStorage.java index be42d2504..eea55f3a2 100644 --- a/src/main/java/com/conveyal/gtfs/error/SQLErrorStorage.java +++ b/src/main/java/com/conveyal/gtfs/error/SQLErrorStorage.java @@ -11,6 +11,7 @@ import java.sql.SQLException; import java.sql.Statement; import java.util.Map; +import java.util.Set; import static com.conveyal.gtfs.util.Util.ensureValidNamespace; @@ -86,6 +87,12 @@ public void storeError (NewGTFSError error) { } } + public void storeErrors (Set errors) { + for (NewGTFSError error : errors) { + storeError(error); + } + } + /** * Commits any outstanding error inserts and returns the error count via a SQL query. 
*/ diff --git a/src/main/java/com/conveyal/gtfs/validator/SpeedTripValidator.java b/src/main/java/com/conveyal/gtfs/validator/SpeedTripValidator.java index 72c22fdf1..4ade8cc5d 100644 --- a/src/main/java/com/conveyal/gtfs/validator/SpeedTripValidator.java +++ b/src/main/java/com/conveyal/gtfs/validator/SpeedTripValidator.java @@ -1,14 +1,16 @@ package com.conveyal.gtfs.validator; +import com.conveyal.gtfs.error.NewGTFSError; import com.conveyal.gtfs.error.SQLErrorStorage; import com.conveyal.gtfs.loader.Feed; import com.conveyal.gtfs.model.Route; -import com.conveyal.gtfs.model.ShapePoint; import com.conveyal.gtfs.model.Stop; import com.conveyal.gtfs.model.StopTime; import com.conveyal.gtfs.model.Trip; +import java.util.HashSet; import java.util.List; +import java.util.Set; import static com.conveyal.gtfs.error.NewGTFSErrorType.*; import static com.conveyal.gtfs.util.Util.fastDistance; @@ -20,6 +22,8 @@ public class SpeedTripValidator extends TripValidator { public static final double MIN_SPEED_KPH = 0.5; + private boolean allTravelTimesAreRounded = true; + private Set travelTimeZeroErrors = new HashSet<>(); public SpeedTripValidator(Feed feed, SQLErrorStorage errorStorage) { super(feed, errorStorage); @@ -53,9 +57,13 @@ public void validateTrip(Trip trip, Route route, List stopTimes, List< if (currStopTime.departure_time < currStopTime.arrival_time) { registerError(currStopTime, DEPARTURE_BEFORE_ARRIVAL); } - // TODO detect the case where all travel times are rounded off to minutes (i.e. tt % 60 == 0) - // In that case determine the maximum and minimum possible speed by adding/removing one minute of slack. + // Detect if travel times are rounded off to minutes. + boolean bothTravelTimesRounded = areTravelTimesRounded(prevStopTime); double travelTimeSeconds = currStopTime.arrival_time - prevStopTime.departure_time; + // If travel times are rounded and travel time is zero, determine the maximum and minimum possible speed + // by adding/removing one minute of slack. + if (bothTravelTimesRounded && travelTimeSeconds == 0) + travelTimeSeconds += 60; if (checkDistanceAndTime(distanceMeters, travelTimeSeconds, currStopTime)) { // If distance and time are OK, we've got valid numbers to calculate a travel speed. double kph = (distanceMeters / 1000D) / (travelTimeSeconds / 60D / 60D); @@ -73,6 +81,26 @@ public void validateTrip(Trip trip, Route route, List stopTimes, List< } } + /** + * Completing this feed validator means checking if there were any unrounded travel times in the feed and (if so) + * registering any zero travel time errors that were passed over before the first unrounded travel time was + * encountered. If in fact all travel times are rounded to the minute, store a special feed-wide error in this case. + */ + public void complete (ValidationResult validationResult) { + if (!allTravelTimesAreRounded) storeErrors(travelTimeZeroErrors); + else registerError(NewGTFSError.forFeed(FEED_TRAVEL_TIMES_ROUNDED, null)); + } + + /** + * Check that arrival and departure time for a stop time are rounded to the minute and update + * {@link #allTravelTimesAreRounded} accordingly. + */ + private boolean areTravelTimesRounded(StopTime stopTime) { + boolean bothTravelTimesAreRounded = stopTime.departure_time % 60 == 0 && stopTime.arrival_time % 60 == 0; + if (!bothTravelTimesAreRounded) this.allTravelTimesAreRounded = false; + return bothTravelTimesAreRounded; + } + /** * This just pulls some of the range checking logic out of the main trip checking loop so it's more readable. 
* @return true if all values are OK @@ -88,7 +116,10 @@ private boolean checkDistanceAndTime (double distanceMeters, double travelTimeSe registerError(stopTime, TRAVEL_TIME_NEGATIVE, travelTimeSeconds); good = false; } else if (travelTimeSeconds == 0) { - registerError(stopTime, TRAVEL_TIME_ZERO); + // Only register the travel time zero error if not all travel times are rounded. Otherwise, hold onto the + // error in the travelTimeZeroErrors collection until the completion of this validator. + if (!allTravelTimesAreRounded) registerError(stopTime, TRAVEL_TIME_ZERO); + else travelTimeZeroErrors.add(createUnregisteredError(stopTime, TRAVEL_TIME_ZERO)); good = false; } return good; @@ -97,7 +128,7 @@ private boolean checkDistanceAndTime (double distanceMeters, double travelTimeSe /** * @return max speed in km/hour. */ - private double getMaxSpeedKph (Route route) { + private static double getMaxSpeedKph (Route route) { int type = -1; if (route != null) type = route.route_type; switch (type) { diff --git a/src/main/java/com/conveyal/gtfs/validator/TripValidator.java b/src/main/java/com/conveyal/gtfs/validator/TripValidator.java index d0164bd84..296742568 100644 --- a/src/main/java/com/conveyal/gtfs/validator/TripValidator.java +++ b/src/main/java/com/conveyal/gtfs/validator/TripValidator.java @@ -1,10 +1,8 @@ package com.conveyal.gtfs.validator; -import com.conveyal.gtfs.error.NewGTFSError; import com.conveyal.gtfs.error.SQLErrorStorage; import com.conveyal.gtfs.loader.Feed; import com.conveyal.gtfs.model.Route; -import com.conveyal.gtfs.model.ShapePoint; import com.conveyal.gtfs.model.Stop; import com.conveyal.gtfs.model.StopTime; import com.conveyal.gtfs.model.Trip; diff --git a/src/main/java/com/conveyal/gtfs/validator/Validator.java b/src/main/java/com/conveyal/gtfs/validator/Validator.java index 66d432579..1c4824539 100644 --- a/src/main/java/com/conveyal/gtfs/validator/Validator.java +++ b/src/main/java/com/conveyal/gtfs/validator/Validator.java @@ -6,9 +6,7 @@ import com.conveyal.gtfs.loader.Feed; import com.conveyal.gtfs.model.Entity; -import java.sql.SQLException; -import java.util.ArrayList; -import java.util.List; +import java.util.Set; /** * A Validator examines a whole GTFS feed or a single trip within a GTFS feed. It accumulates error messages for @@ -39,6 +37,22 @@ public void registerError(Entity entity, NewGTFSErrorType errorType) { errorStorage.storeError(NewGTFSError.forEntity(entity, errorType)); } + /** + * Stores a set of errors. + */ + public void storeErrors(Set errors) { + errorStorage.storeErrors(errors); + } + + /** + * WARNING: this method creates but DOES NOT STORE a new GTFS error. It should only be used in cases where a + * collection of errors need to be temporarily held before storing in batch (e.g., waiting to store travel time zero + * errors before it is determined that the entire feed uses travel times rounded to the minute). + */ + NewGTFSError createUnregisteredError (Entity entity, NewGTFSErrorType errorType) { + return NewGTFSError.forEntity(entity, errorType); + } + // /** // * Store an error that affects a single line of a single table. Add a single key-value pair to it. Wraps the // * underlying error constructor. 
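Note on PATCH 1/9 above: the speed formula and the one-minute slack below mirror what the patch adds to SpeedTripValidator, but this is only a standalone illustrative sketch, not code from the library — the class name, the 500 m stop spacing, and the clock times are made-up values. Without the slack, a hop whose rounded arrival and departure fall on the same minute has a zero travel time and no finite speed can be computed; with the slack, the hop can still be checked against the per-route-type speed limit instead of being reported as TRAVEL_TIME_ZERO right away.

    public class RoundedTravelTimeExample {
        // Estimate hop speed the way the patched validator does: if every time in the feed is
        // rounded to the minute and this hop shows zero seconds, assume up to one minute of
        // travel was hidden by the rounding rather than treating the hop as instantaneous.
        static double estimatedKph(double distanceMeters, int prevDepartureSec, int currArrivalSec,
                                   boolean allTravelTimesRounded) {
            double travelTimeSeconds = currArrivalSec - prevDepartureSec;
            if (allTravelTimesRounded && travelTimeSeconds == 0) {
                travelTimeSeconds += 60; // one minute of slack, as in the patch
            }
            return (distanceMeters / 1000D) / (travelTimeSeconds / 60D / 60D);
        }

        public static void main(String[] args) {
            // Hypothetical hop: stops 500 m apart, both stop_times rounded to 08:00:00 (28800 s).
            System.out.println(estimatedKph(500, 28800, 28800, true)); // prints 30.0 (km/h)
        }
    }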
From 88b269e020d8b3319c92160b79b4d9b53590341a Mon Sep 17 00:00:00 2001 From: evansiroky Date: Mon, 19 Nov 2018 23:16:46 -0800 Subject: [PATCH 2/9] refactor(graphql): update graphql a little bit Take some reasonable changes that came out of #144 and add them here --- pom.xml | 2 +- .../gtfs/graphql/GraphQLGtfsSchema.java | 1 + .../conveyal/gtfs/graphql/GraphQLUtil.java | 6 +- .../gtfs/graphql/fetchers/FeedFetcher.java | 3 + .../gtfs/graphql/fetchers/JDBCFetcher.java | 65 ++++++++++--------- .../graphql/fetchers/RowCountFetcher.java | 21 ++++-- .../gtfs/graphql/GTFSGraphQLTest.java | 50 +++++++++----- src/test/resources/graphql/superNested.txt | 23 +++++++ .../resources/graphql/superNestedNoLimits.txt | 23 +++++++ .../canFetchMultiNestedEntities.json | 63 ++++++++++++++++++ ...FetchMultiNestedEntitiesWithoutLimits.json | 63 ++++++++++++++++++ 11 files changed, 262 insertions(+), 58 deletions(-) create mode 100644 src/test/resources/graphql/superNested.txt create mode 100644 src/test/resources/graphql/superNestedNoLimits.txt create mode 100644 src/test/resources/snapshots/com/conveyal/gtfs/graphql/GTFSGraphQLTest/canFetchMultiNestedEntities.json create mode 100644 src/test/resources/snapshots/com/conveyal/gtfs/graphql/GTFSGraphQLTest/canFetchMultiNestedEntitiesWithoutLimits.json diff --git a/pom.xml b/pom.xml index caac53044..4ca07b391 100644 --- a/pom.xml +++ b/pom.xml @@ -353,7 +353,7 @@ com.graphql-java graphql-java - 4.2 + 11.0 diff --git a/src/main/java/com/conveyal/gtfs/graphql/GraphQLGtfsSchema.java b/src/main/java/com/conveyal/gtfs/graphql/GraphQLGtfsSchema.java index 23ff013a7..fd82b52cc 100644 --- a/src/main/java/com/conveyal/gtfs/graphql/GraphQLGtfsSchema.java +++ b/src/main/java/com/conveyal/gtfs/graphql/GraphQLGtfsSchema.java @@ -347,6 +347,7 @@ public class GraphQLGtfsSchema { .type(new GraphQLList(routeType)) .argument(stringArg("namespace")) .argument(stringArg(SEARCH_ARG)) + .argument(intArg(LIMIT_ARG)) .dataFetcher(new NestedJDBCFetcher( new JDBCFetcher("pattern_stops", "stop_id", null, false), new JDBCFetcher("patterns", "pattern_id", null, false), diff --git a/src/main/java/com/conveyal/gtfs/graphql/GraphQLUtil.java b/src/main/java/com/conveyal/gtfs/graphql/GraphQLUtil.java index d74180a82..5002e5ccc 100644 --- a/src/main/java/com/conveyal/gtfs/graphql/GraphQLUtil.java +++ b/src/main/java/com/conveyal/gtfs/graphql/GraphQLUtil.java @@ -1,9 +1,9 @@ package com.conveyal.gtfs.graphql; -import graphql.schema.FieldDataFetcher; import graphql.schema.GraphQLArgument; import graphql.schema.GraphQLFieldDefinition; import graphql.schema.GraphQLList; +import graphql.schema.PropertyDataFetcher; import static graphql.Scalars.GraphQLFloat; import static graphql.Scalars.GraphQLInt; @@ -17,7 +17,7 @@ public static GraphQLFieldDefinition string (String name) { return newFieldDefinition() .name(name) .type(GraphQLString) - .dataFetcher(new FieldDataFetcher(name)) + .dataFetcher(new PropertyDataFetcher(name)) .build(); } @@ -25,7 +25,7 @@ public static GraphQLFieldDefinition intt (String name) { return newFieldDefinition() .name(name) .type(GraphQLInt) - .dataFetcher(new FieldDataFetcher(name)) + .dataFetcher(new PropertyDataFetcher(name)) .build(); } diff --git a/src/main/java/com/conveyal/gtfs/graphql/fetchers/FeedFetcher.java b/src/main/java/com/conveyal/gtfs/graphql/fetchers/FeedFetcher.java index 4e505315b..5b5386529 100644 --- a/src/main/java/com/conveyal/gtfs/graphql/fetchers/FeedFetcher.java +++ b/src/main/java/com/conveyal/gtfs/graphql/fetchers/FeedFetcher.java @@ -14,6 +14,8 @@ 
import java.util.HashMap; import java.util.Map; +import static com.conveyal.gtfs.graphql.fetchers.JDBCFetcher.validateNamespace; + /** * Fetch the summary row for a particular loaded feed, based on its namespace. * This essentially gets the row from the top-level summary table of all feeds that have been loaded into the database. @@ -25,6 +27,7 @@ public class FeedFetcher implements DataFetcher { @Override public Map get (DataFetchingEnvironment environment) { String namespace = environment.getArgument("namespace"); // This is the unique table prefix (the "schema"). + validateNamespace(namespace); StringBuilder sqlBuilder = new StringBuilder(); sqlBuilder.append(String.format("select * from feeds where namespace = '%s'", namespace)); Connection connection = null; diff --git a/src/main/java/com/conveyal/gtfs/graphql/fetchers/JDBCFetcher.java b/src/main/java/com/conveyal/gtfs/graphql/fetchers/JDBCFetcher.java index 6ab6c90e9..3eac44404 100644 --- a/src/main/java/com/conveyal/gtfs/graphql/fetchers/JDBCFetcher.java +++ b/src/main/java/com/conveyal/gtfs/graphql/fetchers/JDBCFetcher.java @@ -167,12 +167,16 @@ public List> get (DataFetchingEnvironment environment) { * Handle fetching functionality for a given namespace, set of join values, and arguments. This is broken out from * the standard get function so that it can be reused in other fetchers (i.e., NestedJdbcFetcher) */ - List> getResults (String namespace, List parentJoinValues, Map arguments) { + List> getResults ( + String namespace, + List parentJoinValues, + Map graphQLQueryArguments + ) { // Track the parameters for setting prepared statement parameters - List parameters = new ArrayList<>(); + List preparedStatementParameters = new ArrayList<>(); // This will contain one Map for each row fetched from the database table. List> results = new ArrayList<>(); - if (arguments == null) arguments = new HashMap<>(); + if (graphQLQueryArguments == null) graphQLQueryArguments = new HashMap<>(); // Ensure namespace exists and is clean. Note: FeedFetcher will have executed before this and validated that an // entry exists in the feeds table and the schema actually exists in the database. validateNamespace(namespace); @@ -188,31 +192,32 @@ List> getResults (String namespace, List parentJoinV sqlBuilder.append("select *"); // We will build up additional sql clauses in this List (note: must be a List so that the order is preserved). - List conditions = new ArrayList<>(); + List whereConditions = new ArrayList<>(); // The order by clause will go here. String sortBy = ""; // If we are fetching an item nested within a GTFS entity in the GraphQL query, we want to add an SQL "where" // clause. This could conceivably be done automatically, but it's clearer to just express the intent. // Note, this is assuming the type of the field in the parent is a string. if (parentJoinField != null && parentJoinValues != null && !parentJoinValues.isEmpty()) { - conditions.add(makeInClause(parentJoinField, parentJoinValues, parameters)); + whereConditions.add(makeInClause(parentJoinField, parentJoinValues, preparedStatementParameters)); } if (sortField != null) { // Sort field is not provided by user input, so it's ok to add here (i.e., it's not prone to SQL injection). 
sortBy = String.format(" order by %s", sortField); } - Set argumentKeys = arguments.keySet(); + Set argumentKeys = graphQLQueryArguments.keySet(); for (String key : argumentKeys) { // The pagination, bounding box, and date/time args should all be skipped here because they are handled // separately below from standard args (pagination becomes limit/offset clauses, bounding box applies to // stops table, and date/time args filter stop times. All other args become "where X in A, B, C" clauses. if (argsToSkip.contains(key)) continue; if (ID_ARG.equals(key)) { - Integer value = (Integer) arguments.get(key); - conditions.add(String.join(" = ", "id", value.toString())); + Integer value = (Integer) graphQLQueryArguments.get(key); + whereConditions.add(String.join(" = ", "id", value.toString())); } else { - List values = (List) arguments.get(key); - if (values != null && !values.isEmpty()) conditions.add(makeInClause(key, values, parameters)); + List values = (List) graphQLQueryArguments.get(key); + if (values != null && !values.isEmpty()) + whereConditions.add(makeInClause(key, values, preparedStatementParameters)); } } if (argumentKeys.containsAll(boundingBoxArgs)) { @@ -222,7 +227,7 @@ List> getResults (String namespace, List parentJoinV // operating on the patterns table, a SELECT DISTINCT patterns query will be constructed with a join to // stops and pattern stops. for (String bound : boundingBoxArgs) { - Double value = (Double) arguments.get(bound); + Double value = (Double) graphQLQueryArguments.get(bound); // Determine delimiter/equality operator based on min/max String delimiter = bound.startsWith("max") ? " <= " : " >= "; // Determine field based on lat/lon @@ -232,7 +237,7 @@ List> getResults (String namespace, List parentJoinV boundsConditions.add(String.join(delimiter, fieldWithNamespace, value.toString())); } if ("stops".equals(tableName)) { - conditions.addAll(boundsConditions); + whereConditions.addAll(boundsConditions); } else if ("patterns".equals(tableName)) { // Add from table as unique_pattern_ids_in_bounds to match patterns table -> pattern stops -> stops fromTables.add( @@ -243,7 +248,7 @@ List> getResults (String namespace, List parentJoinV String.join(" and ", boundsConditions), namespace )); - conditions.add(String.format("%s.patterns.pattern_id = unique_pattern_ids_in_bounds.pattern_id", namespace)); + whereConditions.add(String.format("%s.patterns.pattern_id = unique_pattern_ids_in_bounds.pattern_id", namespace)); } } if (argumentKeys.contains(DATE_ARG)) { @@ -253,7 +258,7 @@ List> getResults (String namespace, List parentJoinV // service_dates table. In other words, feeds generated by the editor cannot be queried with the date/time args. String tripsTable = String.format("%s.trips", namespace); fromTables.add(tripsTable); - String date = getDateArgument(arguments); + String date = getDateArgument(graphQLQueryArguments); // Gather all service IDs that run on the provided date. fromTables.add(String.format( "(select distinct service_id from %s.service_dates where service_date = ?) as unique_service_ids_in_operation", @@ -261,11 +266,11 @@ List> getResults (String namespace, List parentJoinV ); // Add date to beginning of parameters list (it is used to pre-select a table in the from clause before any // other conditions or parameters are appended). 
- parameters.add(0, date); + preparedStatementParameters.add(0, date); if (argumentKeys.contains(FROM_ARG) && argumentKeys.contains(TO_ARG)) { // Determine which trips start in the specified time window by joining to filtered stop times. String timeFilteredTrips = "trips_beginning_in_time_period"; - conditions.add(String.format("%s.trip_id = %s.trip_id", timeFilteredTrips, tripsTable)); + whereConditions.add(String.format("%s.trip_id = %s.trip_id", timeFilteredTrips, tripsTable)); // Select all trip IDs that start during the specified time window. Note: the departure and arrival times // are divided by 86399 to account for trips that begin after midnight. FIXME: Should this be 86400? fromTables.add(String.format( @@ -273,16 +278,16 @@ List> getResults (String namespace, List parentJoinV "from (select distinct on (trip_id) * from %s.stop_times order by trip_id, stop_sequence) as first_stop_times " + "where departure_time %% 86399 >= %d and departure_time %% 86399 <= %d) as %s", namespace, - (int) arguments.get(FROM_ARG), - (int) arguments.get(TO_ARG), + (int) graphQLQueryArguments.get(FROM_ARG), + (int) graphQLQueryArguments.get(TO_ARG), timeFilteredTrips)); } // Join trips to service_dates (unique_service_ids_in_operation). - conditions.add(String.format("%s.service_id = unique_service_ids_in_operation.service_id", tripsTable)); + whereConditions.add(String.format("%s.service_id = unique_service_ids_in_operation.service_id", tripsTable)); } if (argumentKeys.contains(SEARCH_ARG)) { // Handle string search argument - String value = (String) arguments.get(SEARCH_ARG); + String value = (String) graphQLQueryArguments.get(SEARCH_ARG); if (!value.isEmpty()) { // Only apply string search if string is not empty. Set searchFields = getSearchFields(namespace); @@ -292,23 +297,23 @@ List> getResults (String namespace, List parentJoinV // FIXME: is ILIKE compatible with non-Postgres? LIKE doesn't work well enough (even when setting // the strings to lower case). searchClauses.add(String.format("%s ILIKE ?", field)); - parameters.add(String.format("%%%s%%", value)); + preparedStatementParameters.add(String.format("%%%s%%", value)); } if (!searchClauses.isEmpty()) { // Wrap string search in parentheses to isolate from other conditions. - conditions.add(String.format(("(%s)"), String.join(" OR ", searchClauses))); + whereConditions.add(String.format(("(%s)"), String.join(" OR ", searchClauses))); } } } sqlBuilder.append(String.format(" from %s", String.join(", ", fromTables))); - if (!conditions.isEmpty()) { + if (!whereConditions.isEmpty()) { sqlBuilder.append(" where "); - sqlBuilder.append(String.join(" and ", conditions)); + sqlBuilder.append(String.join(" and ", whereConditions)); } // The default value for sortBy is an empty string, so it's safe to always append it here. Also, there is no // threat of SQL injection because the sort field value is not user input. sqlBuilder.append(sortBy); - Integer limit = (Integer) arguments.get(LIMIT_ARG); + Integer limit = (Integer) graphQLQueryArguments.get(LIMIT_ARG); if (limit == null) { limit = autoLimit ? 
DEFAULT_ROWS_TO_FETCH : -1; } @@ -322,7 +327,7 @@ List> getResults (String namespace, List parentJoinV } else { sqlBuilder.append(" limit ").append(limit); } - Integer offset = (Integer) arguments.get(OFFSET_ARG); + Integer offset = (Integer) graphQLQueryArguments.get(OFFSET_ARG); if (offset != null && offset >= 0) { sqlBuilder.append(" offset ").append(offset); } @@ -331,7 +336,7 @@ List> getResults (String namespace, List parentJoinV connection = GTFSGraphQL.getConnection(); PreparedStatement preparedStatement = connection.prepareStatement(sqlBuilder.toString()); int oneBasedIndex = 1; - for (String parameter : parameters) { + for (String parameter : preparedStatementParameters) { preparedStatement.setString(oneBasedIndex++, parameter); } // This logging produces a lot of noise during testing due to large numbers of joined sub-queries @@ -381,7 +386,7 @@ private static String getDateArgument(Map arguments) { * @param namespace database schema namespace/table prefix * @return */ - private static void validateNamespace(String namespace) { + public static void validateNamespace(String namespace) { if (namespace == null) { // If namespace is null, do no attempt a query on a namespace that does not exist. throw new IllegalArgumentException("Namespace prefix must be provided."); @@ -437,7 +442,7 @@ private Set getSearchFields(String namespace) { /** * Construct filter clause with '=' (single string) and add values to list of parameters. * */ - static String makeInClause(String filterField, String string, List parameters) { + static String filterEquals(String filterField, String string, List parameters) { // Add string to list of parameters (to be later used to set parameters for prepared statement). parameters.add(string); return String.format("%s = ?", filterField); @@ -448,7 +453,7 @@ static String makeInClause(String filterField, String string, List param * */ static String makeInClause(String filterField, List strings, List parameters) { if (strings.size() == 1) { - return makeInClause(filterField, strings.get(0), parameters); + return filterEquals(filterField, strings.get(0), parameters); } else { // Add strings to list of parameters (to be later used to set parameters for prepared statement). parameters.addAll(strings); diff --git a/src/main/java/com/conveyal/gtfs/graphql/fetchers/RowCountFetcher.java b/src/main/java/com/conveyal/gtfs/graphql/fetchers/RowCountFetcher.java index e1a38a1aa..2c7b45e74 100644 --- a/src/main/java/com/conveyal/gtfs/graphql/fetchers/RowCountFetcher.java +++ b/src/main/java/com/conveyal/gtfs/graphql/fetchers/RowCountFetcher.java @@ -15,7 +15,6 @@ import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; -import java.sql.Statement; import java.util.ArrayList; import java.util.List; import java.util.Map; @@ -23,7 +22,7 @@ import static com.conveyal.gtfs.graphql.GraphQLUtil.intt; import static com.conveyal.gtfs.graphql.GraphQLUtil.string; import static com.conveyal.gtfs.graphql.GraphQLUtil.stringArg; -import static com.conveyal.gtfs.graphql.fetchers.JDBCFetcher.makeInClause; +import static com.conveyal.gtfs.graphql.fetchers.JDBCFetcher.filterEquals; import static graphql.Scalars.GraphQLInt; import static graphql.schema.GraphQLFieldDefinition.newFieldDefinition; import static graphql.schema.GraphQLObjectType.newObject; @@ -71,8 +70,13 @@ public Object get(DataFetchingEnvironment environment) { // FIXME Does this handle null cases? // Add where clause to filter out non-matching results. 
String filterValue = (String) parentFeedMap.get(filterField); - String inClause = makeInClause(filterField, filterValue, parameters); - clauses.add(String.join(" ", "where", inClause)); + clauses.add( + String.join( + " ", + "where", + filterEquals(filterField, filterValue, parameters) + ) + ); } else if (groupByField != null) { // Handle group by field and optionally handle any filter arguments passed in. if (!argKeys.isEmpty()) { @@ -86,8 +90,13 @@ public Object get(DataFetchingEnvironment environment) { } String filterValue = (String) arguments.get(groupedFilterField); if (filterValue != null) { - String inClause = makeInClause(groupedFilterField, filterValue, parameters); - clauses.add(String.join(" ", "where", inClause)); + clauses.add( + String.join( + " ", + "where", + filterEquals(groupedFilterField, filterValue, parameters) + ) + ); } } // Finally, add group by clause. diff --git a/src/test/java/com/conveyal/gtfs/graphql/GTFSGraphQLTest.java b/src/test/java/com/conveyal/gtfs/graphql/GTFSGraphQLTest.java index 44a197fc6..34a6129e2 100644 --- a/src/test/java/com/conveyal/gtfs/graphql/GTFSGraphQLTest.java +++ b/src/test/java/com/conveyal/gtfs/graphql/GTFSGraphQLTest.java @@ -75,74 +75,74 @@ public static void tearDownClass() { } // tests that the graphQL schema can initialize - @Test + @Test(timeout=5000) public void canInitialize() { GTFSGraphQL.initialize(testDataSource); GraphQL graphQL = GTFSGraphQL.getGraphQl(); } // tests that the root element of a feed can be fetched - @Test + @Test(timeout=5000) public void canFetchFeed() throws IOException { assertThat(queryGraphQL("feed.txt"), matchesSnapshot()); } // tests that the row counts of a feed can be fetched - @Test + @Test(timeout=5000) public void canFetchFeedRowCounts() throws IOException { assertThat(queryGraphQL("feedRowCounts.txt"), matchesSnapshot()); } // tests that the errors of a feed can be fetched - @Test + @Test(timeout=5000) public void canFetchErrors() throws IOException { assertThat(queryGraphQL("feedErrors.txt"), matchesSnapshot()); } // tests that the feed_info of a feed can be fetched - @Test + @Test(timeout=5000) public void canFetchFeedInfo() throws IOException { assertThat(queryGraphQL("feedFeedInfo.txt"), matchesSnapshot()); } // tests that the patterns of a feed can be fetched - @Test + @Test(timeout=5000) public void canFetchPatterns() throws IOException { assertThat(queryGraphQL("feedPatterns.txt"), matchesSnapshot()); } // tests that the agencies of a feed can be fetched - @Test + @Test(timeout=5000) public void canFetchAgencies() throws IOException { assertThat(queryGraphQL("feedAgencies.txt"), matchesSnapshot()); } // tests that the calendars of a feed can be fetched - @Test + @Test(timeout=5000) public void canFetchCalendars() throws IOException { assertThat(queryGraphQL("feedCalendars.txt"), matchesSnapshot()); } // tests that the fares of a feed can be fetched - @Test + @Test(timeout=5000) public void canFetchFares() throws IOException { assertThat(queryGraphQL("feedFares.txt"), matchesSnapshot()); } // tests that the routes of a feed can be fetched - @Test + @Test(timeout=5000) public void canFetchRoutes() throws IOException { assertThat(queryGraphQL("feedRoutes.txt"), matchesSnapshot()); } // tests that the stops of a feed can be fetched - @Test + @Test(timeout=5000) public void canFetchStops() throws IOException { assertThat(queryGraphQL("feedStops.txt"), matchesSnapshot()); } // tests that the trips of a feed can be fetched - @Test + @Test(timeout=5000) public void canFetchTrips() throws 
IOException { assertThat(queryGraphQL("feedTrips.txt"), matchesSnapshot()); } @@ -150,19 +150,19 @@ public void canFetchTrips() throws IOException { // TODO: make tests for schedule_exceptions / calendar_dates // tests that the stop times of a feed can be fetched - @Test + @Test(timeout=5000) public void canFetchStopTimes() throws IOException { assertThat(queryGraphQL("feedStopTimes.txt"), matchesSnapshot()); } // tests that the stop times of a feed can be fetched - @Test + @Test(timeout=5000) public void canFetchServices() throws IOException { assertThat(queryGraphQL("feedServices.txt"), matchesSnapshot()); } // tests that the stop times of a feed can be fetched - @Test + @Test(timeout=5000) public void canFetchRoutesAndFilterTripsByDateAndTime() throws IOException { Map variables = new HashMap(); variables.put("namespace", testNamespace); @@ -176,16 +176,30 @@ public void canFetchRoutesAndFilterTripsByDateAndTime() throws IOException { } // tests that the limit argument applies properly to a fetcher defined with autolimit set to false - @Test + @Test(timeout=5000) public void canFetchNestedEntityWithLimit() throws IOException { assertThat(queryGraphQL("feedStopsStopTimeLimit.txt"), matchesSnapshot()); } + // tests whether a graphQL query that has superfluous and redundant nesting can find the right result + // if the graphQL dataloader is enabled correctly, there will not be any repeating sql queries in the logs + @Test(timeout=5000) + public void canFetchMultiNestedEntities() throws IOException { + assertThat(queryGraphQL("superNested.txt"), matchesSnapshot()); + } + // tests whether a graphQL query that has superfluous and redundant nesting can find the right result + // if the graphQL dataloader is enabled correctly, there will not be any repeating sql queries in the logs + // furthermore, some queries should have been combined together + @Test(timeout=5000) + public void canFetchMultiNestedEntitiesWithoutLimits() throws IOException { + assertThat(queryGraphQL("superNestedNoLimits.txt"), matchesSnapshot()); + } + /** * attempt to fetch more than one record with SQL injection as inputs * the graphql library should properly escape the string and return 0 results for stops */ - @Test + @Test(timeout=5000) public void canSanitizeSQLInjectionSentAsInput() throws IOException { Map variables = new HashMap(); variables.put("namespace", testInjectionNamespace); @@ -204,7 +218,7 @@ public void canSanitizeSQLInjectionSentAsInput() throws IOException { * attempt run a graphql query when one of the pieces of data contains a SQL injection * the graphql library should properly escape the string and complete the queries */ - @Test + @Test(timeout=5000) public void canSanitizeSQLInjectionSentAsKeyValue() throws IOException, SQLException { // manually update the route_id key in routes and patterns String injection = "'' OR 1=1; Select ''99"; diff --git a/src/test/resources/graphql/superNested.txt b/src/test/resources/graphql/superNested.txt new file mode 100644 index 000000000..3d2ea056b --- /dev/null +++ b/src/test/resources/graphql/superNested.txt @@ -0,0 +1,23 @@ +query ($namespace: String) { + feed(namespace: $namespace) { + feed_version + routes { + route_id + stops { + routes { + route_id + stops { + routes { + route_id + stops { + stop_id + } + } + stop_id + } + } + stop_id + } + } + } +} \ No newline at end of file diff --git a/src/test/resources/graphql/superNestedNoLimits.txt b/src/test/resources/graphql/superNestedNoLimits.txt new file mode 100644 index 000000000..71902d06e --- /dev/null +++
b/src/test/resources/graphql/superNestedNoLimits.txt @@ -0,0 +1,23 @@ + query ($namespace: String) { + feed(namespace: $namespace) { + feed_version + routes(limit: -1) { + route_id + stops(limit: -1) { + routes(limit: -1) { + route_id + stops(limit: -1) { + routes(limit: -1) { + route_id + stops(limit: -1) { + stop_id + } + } + stop_id + } + } + stop_id + } + } + } + } \ No newline at end of file diff --git a/src/test/resources/snapshots/com/conveyal/gtfs/graphql/GTFSGraphQLTest/canFetchMultiNestedEntities.json b/src/test/resources/snapshots/com/conveyal/gtfs/graphql/GTFSGraphQLTest/canFetchMultiNestedEntities.json new file mode 100644 index 000000000..97f3b64aa --- /dev/null +++ b/src/test/resources/snapshots/com/conveyal/gtfs/graphql/GTFSGraphQLTest/canFetchMultiNestedEntities.json @@ -0,0 +1,63 @@ +{ + "data" : { + "feed" : { + "feed_version" : "1.0", + "routes" : [ { + "route_id" : "1", + "stops" : [ { + "routes" : [ { + "route_id" : "1", + "stops" : [ { + "routes" : [ { + "route_id" : "1", + "stops" : [ { + "stop_id" : "4u6g" + }, { + "stop_id" : "johv" + } ] + } ], + "stop_id" : "4u6g" + }, { + "routes" : [ { + "route_id" : "1", + "stops" : [ { + "stop_id" : "4u6g" + }, { + "stop_id" : "johv" + } ] + } ], + "stop_id" : "johv" + } ] + } ], + "stop_id" : "4u6g" + }, { + "routes" : [ { + "route_id" : "1", + "stops" : [ { + "routes" : [ { + "route_id" : "1", + "stops" : [ { + "stop_id" : "4u6g" + }, { + "stop_id" : "johv" + } ] + } ], + "stop_id" : "4u6g" + }, { + "routes" : [ { + "route_id" : "1", + "stops" : [ { + "stop_id" : "4u6g" + }, { + "stop_id" : "johv" + } ] + } ], + "stop_id" : "johv" + } ] + } ], + "stop_id" : "johv" + } ] + } ] + } + } +} \ No newline at end of file diff --git a/src/test/resources/snapshots/com/conveyal/gtfs/graphql/GTFSGraphQLTest/canFetchMultiNestedEntitiesWithoutLimits.json b/src/test/resources/snapshots/com/conveyal/gtfs/graphql/GTFSGraphQLTest/canFetchMultiNestedEntitiesWithoutLimits.json new file mode 100644 index 000000000..97f3b64aa --- /dev/null +++ b/src/test/resources/snapshots/com/conveyal/gtfs/graphql/GTFSGraphQLTest/canFetchMultiNestedEntitiesWithoutLimits.json @@ -0,0 +1,63 @@ +{ + "data" : { + "feed" : { + "feed_version" : "1.0", + "routes" : [ { + "route_id" : "1", + "stops" : [ { + "routes" : [ { + "route_id" : "1", + "stops" : [ { + "routes" : [ { + "route_id" : "1", + "stops" : [ { + "stop_id" : "4u6g" + }, { + "stop_id" : "johv" + } ] + } ], + "stop_id" : "4u6g" + }, { + "routes" : [ { + "route_id" : "1", + "stops" : [ { + "stop_id" : "4u6g" + }, { + "stop_id" : "johv" + } ] + } ], + "stop_id" : "johv" + } ] + } ], + "stop_id" : "4u6g" + }, { + "routes" : [ { + "route_id" : "1", + "stops" : [ { + "routes" : [ { + "route_id" : "1", + "stops" : [ { + "stop_id" : "4u6g" + }, { + "stop_id" : "johv" + } ] + } ], + "stop_id" : "4u6g" + }, { + "routes" : [ { + "route_id" : "1", + "stops" : [ { + "stop_id" : "4u6g" + }, { + "stop_id" : "johv" + } ] + } ], + "stop_id" : "johv" + } ] + } ], + "stop_id" : "johv" + } ] + } ] + } + } +} \ No newline at end of file From 63938d5c84e8b9423970e71143644f5ec2b72125 Mon Sep 17 00:00:00 2001 From: evansiroky Date: Mon, 26 Nov 2018 20:22:55 -0800 Subject: [PATCH 3/9] refactor(validation): rename variable to avoid confusion at first I thought this was the first calendar date from the calendar_dates.txt file, but then I realized it was actually a result of calculating actual service of various days. Renaming this variable should reflect this distinction and avoid future confusion. 
--- .../gtfs/validator/ServiceValidator.java | 4 ++-- .../gtfs/validator/ValidationResult.java | 16 +++++++++------- 2 files changed, 11 insertions(+), 9 deletions(-) diff --git a/src/main/java/com/conveyal/gtfs/validator/ServiceValidator.java b/src/main/java/com/conveyal/gtfs/validator/ServiceValidator.java index a5d3537b5..7c8578810 100644 --- a/src/main/java/com/conveyal/gtfs/validator/ServiceValidator.java +++ b/src/main/java/com/conveyal/gtfs/validator/ServiceValidator.java @@ -185,8 +185,8 @@ select durations.service_id, duration_seconds, days_active from ( if (date.isAfter(lastDate)) lastDate = date; } // Copy some useful information into the ValidationResult object to return to the caller. - validationResult.firstCalendarDate = firstDate; - validationResult.lastCalendarDate = lastDate; + validationResult.firstDateOfService = firstDate; + validationResult.lastDateOfService = lastDate; // Is this any different? firstDate.until(lastDate, ChronoUnit.DAYS); int nDays = (int) ChronoUnit.DAYS.between(firstDate, lastDate) + 1; validationResult.dailyBusSeconds = new int[nDays]; diff --git a/src/main/java/com/conveyal/gtfs/validator/ValidationResult.java b/src/main/java/com/conveyal/gtfs/validator/ValidationResult.java index 51bc514b1..63f7449eb 100644 --- a/src/main/java/com/conveyal/gtfs/validator/ValidationResult.java +++ b/src/main/java/com/conveyal/gtfs/validator/ValidationResult.java @@ -1,13 +1,10 @@ package com.conveyal.gtfs.validator; -import java.awt.geom.Rectangle2D; -import java.io.Serializable; -import com.conveyal.gtfs.model.Pattern; -import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonIgnoreProperties; +import java.awt.geom.Rectangle2D; +import java.io.Serializable; import java.time.LocalDate; -import java.util.List; /** * An instance of this class is returned by the validator. 
@@ -26,8 +23,13 @@ public class ValidationResult implements Serializable { public int errorCount; public LocalDate declaredStartDate; public LocalDate declaredEndDate; - public LocalDate firstCalendarDate; - public LocalDate lastCalendarDate; + + /** + * the actual first and last date of service which is calculated in {@link ServiceValidator#complete} + */ + public LocalDate firstDateOfService; + public LocalDate lastDateOfService; + public int[] dailyBusSeconds; public int[] dailyTramSeconds; public int[] dailyMetroSeconds; From 18fe59305c4752bff350716ac2b5c80bde2a1ff4 Mon Sep 17 00:00:00 2001 From: Landon Reed Date: Tue, 11 Dec 2018 10:38:25 -0500 Subject: [PATCH 4/9] fix(snapshot): fix duplicated schedule exceptions issue with multimap fixes #161 --- .../gtfs/loader/JdbcGtfsSnapshotter.java | 47 +++++++------------ 1 file changed, 17 insertions(+), 30 deletions(-) diff --git a/src/main/java/com/conveyal/gtfs/loader/JdbcGtfsSnapshotter.java b/src/main/java/com/conveyal/gtfs/loader/JdbcGtfsSnapshotter.java index 2229968ec..96f946044 100644 --- a/src/main/java/com/conveyal/gtfs/loader/JdbcGtfsSnapshotter.java +++ b/src/main/java/com/conveyal/gtfs/loader/JdbcGtfsSnapshotter.java @@ -2,6 +2,9 @@ import com.conveyal.gtfs.model.Calendar; import com.conveyal.gtfs.model.CalendarDate; +import com.google.common.collect.HashMultimap; +import com.google.common.collect.Multimap; +import com.google.common.collect.Sets; import org.apache.commons.dbutils.DbUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -13,10 +16,8 @@ import java.sql.SQLException; import java.sql.Statement; import java.time.format.DateTimeFormatter; -import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; -import java.util.List; import java.util.Map; import static com.conveyal.gtfs.util.Util.randomIdString; @@ -209,21 +210,16 @@ private TableLoadResult createScheduleExceptionsTable() { ); Iterable calendarDates = calendarDatesReader.getAll(); - // keep track of calendars by service id in case we need to add dummy calendar entries + // Keep track of calendars by service id in case we need to add dummy calendar entries. Map calendarsByServiceId = new HashMap<>(); - // iterate through calendar dates to build up to get list of dates with exceptions - HashMap> removedServiceDays = new HashMap<>(); - HashMap> addedServiceDays = new HashMap<>(); - List datesWithExceptions = new ArrayList<>(); + // Iterate through calendar dates to build up maps from each date to the service IDs added or removed on that date.
+ Multimap removedServiceForDate = HashMultimap.create(); + Multimap addedServiceForDate = HashMultimap.create(); for (CalendarDate calendarDate : calendarDates) { String date = calendarDate.date.format(DateTimeFormatter.BASIC_ISO_DATE); - datesWithExceptions.add(date); if (calendarDate.exception_type == 1) { - List dateAddedServices = addedServiceDays.getOrDefault(date, new ArrayList<>()); - dateAddedServices.add(calendarDate.service_id); - addedServiceDays.put(date, dateAddedServices); - + addedServiceForDate.put(date, calendarDate.service_id); // create (if needed) and extend range of dummy calendar that would need to be created if we are // copying from a feed that doesn't have the calendar.txt file Calendar calendar = calendarsByServiceId.getOrDefault(calendarDate.service_id, new Calendar()); @@ -236,33 +232,24 @@ private TableLoadResult createScheduleExceptionsTable() { } calendarsByServiceId.put(calendarDate.service_id, calendar); } else { - List dateRemovedServices = removedServiceDays.getOrDefault(date, new ArrayList<>()); - dateRemovedServices.add(calendarDate.service_id); - removedServiceDays.put(date, dateRemovedServices); + removedServiceForDate.put(date, calendarDate.service_id); } } - - // iterate through days and add to database - // for usability and simplicity of code, don't attempt to find all dates with similar - // added and removed services, but simply create an entry for each found date - for (String dateWithException : datesWithExceptions) { - scheduleExceptionsStatement.setString(1, dateWithException); - String[] dates = {dateWithException}; + // Iterate through dates with added or removed service and add to database. + // For usability and simplicity of code, don't attempt to find all dates with similar + // added and removed services, but simply create an entry for each found date. 
for (String date : Sets.union(removedServiceForDate.keySet(), addedServiceForDate.keySet())) { scheduleExceptionsStatement.setString(1, date); String[] dates = {date}; scheduleExceptionsStatement.setArray(2, connection.createArrayOf("text", dates)); scheduleExceptionsStatement.setInt(3, 9); // FIXME use better static type scheduleExceptionsStatement.setArray( 4, connection.createArrayOf("text", addedServiceForDate.get(date).toArray()) ); scheduleExceptionsStatement.setArray( 5, connection.createArrayOf("text", removedServiceForDate.get(date).toArray()) ); scheduleExceptionsTracker.addBatch(); } From d6edb05c4128dee14720d7e27a2d557aee186d2b Mon Sep 17 00:00:00 2001 From: evansiroky Date: Wed, 12 Dec 2018 10:58:24 -0800 Subject: [PATCH 5/9] refactor(validator): add clarification comment and revert variable renaming --- .../java/com/conveyal/gtfs/validator/ServiceValidator.java | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/src/main/java/com/conveyal/gtfs/validator/ServiceValidator.java b/src/main/java/com/conveyal/gtfs/validator/ServiceValidator.java index 7c8578810..7097da1c1 100644 --- a/src/main/java/com/conveyal/gtfs/validator/ServiceValidator.java +++ b/src/main/java/com/conveyal/gtfs/validator/ServiceValidator.java @@ -185,8 +185,11 @@ select durations.service_id, duration_seconds, days_active from ( if (date.isAfter(lastDate)) lastDate = date; } // Copy some useful information into the ValidationResult object to return to the caller. - validationResult.firstDateOfService = firstDate; - validationResult.lastDateOfService = lastDate; + // These variables are actually not directly tied to data in the calendar_dates.txt file. Instead, they + // represent the first and last date respectively of any entry in the calendar.txt and calendar_dates.txt + // files. + validationResult.firstCalendarDate = firstDate; + validationResult.lastCalendarDate = lastDate; // Is this any different? firstDate.until(lastDate, ChronoUnit.DAYS); int nDays = (int) ChronoUnit.DAYS.between(firstDate, lastDate) + 1; validationResult.dailyBusSeconds = new int[nDays]; From 7a372a602edf3cbfac46c225f9234b5ae61ccdee Mon Sep 17 00:00:00 2001 From: evansiroky Date: Wed, 12 Dec 2018 11:34:56 -0800 Subject: [PATCH 6/9] refactor(validation): also rename variable in its class and add comments there too --- .../java/com/conveyal/gtfs/validator/ValidationResult.java | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/src/main/java/com/conveyal/gtfs/validator/ValidationResult.java b/src/main/java/com/conveyal/gtfs/validator/ValidationResult.java index 63f7449eb..d79a52444 100644 --- a/src/main/java/com/conveyal/gtfs/validator/ValidationResult.java +++ b/src/main/java/com/conveyal/gtfs/validator/ValidationResult.java @@ -26,9 +26,11 @@ public class ValidationResult implements Serializable { /** * the actual first and last date of service which is calculated in {@link ServiceValidator#complete} + * These variables are actually not directly tied to data in the calendar_dates.txt file. Instead, they represent + * the first and last date respectively of any entry in the calendar.txt and calendar_dates.txt files.
*/ - public LocalDate firstDateOfService; - public LocalDate lastDateOfService; + public LocalDate firstCalendarDate; + public LocalDate lastCalendarDate; public int[] dailyBusSeconds; public int[] dailyTramSeconds; From ffa2080a3357b4ca6f0b721c2ca7c40ef027b1c3 Mon Sep 17 00:00:00 2001 From: Landon Reed Date: Tue, 18 Dec 2018 15:14:22 -0500 Subject: [PATCH 7/9] style: address PR comment about bracketed conditional block --- .../java/com/conveyal/gtfs/validator/SpeedTripValidator.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/main/java/com/conveyal/gtfs/validator/SpeedTripValidator.java b/src/main/java/com/conveyal/gtfs/validator/SpeedTripValidator.java index 4ade8cc5d..46bc39d14 100644 --- a/src/main/java/com/conveyal/gtfs/validator/SpeedTripValidator.java +++ b/src/main/java/com/conveyal/gtfs/validator/SpeedTripValidator.java @@ -62,8 +62,9 @@ public void validateTrip(Trip trip, Route route, List stopTimes, List< double travelTimeSeconds = currStopTime.arrival_time - prevStopTime.departure_time; // If travel times are rounded and travel time is zero, determine the maximum and minimum possible speed // by adding/removing one minute of slack. - if (bothTravelTimesRounded && travelTimeSeconds == 0) + if (bothTravelTimesRounded && travelTimeSeconds == 0) { travelTimeSeconds += 60; + } if (checkDistanceAndTime(distanceMeters, travelTimeSeconds, currStopTime)) { // If distance and time are OK, we've got valid numbers to calculate a travel speed. double kph = (distanceMeters / 1000D) / (travelTimeSeconds / 60D / 60D); From dddc8fe0ee6124c6d2b63fe9fae92f72a96130c8 Mon Sep 17 00:00:00 2001 From: Landon Reed Date: Tue, 18 Dec 2018 15:21:07 -0500 Subject: [PATCH 8/9] test: fix test snapshot to include rounded travel times error --- .../gtfs/graphql/GTFSGraphQLTest/canFetchErrors.json | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/src/test/resources/snapshots/com/conveyal/gtfs/graphql/GTFSGraphQLTest/canFetchErrors.json b/src/test/resources/snapshots/com/conveyal/gtfs/graphql/GTFSGraphQLTest/canFetchErrors.json index d50b0274a..0eaafe2d9 100644 --- a/src/test/resources/snapshots/com/conveyal/gtfs/graphql/GTFSGraphQLTest/canFetchErrors.json +++ b/src/test/resources/snapshots/com/conveyal/gtfs/graphql/GTFSGraphQLTest/canFetchErrors.json @@ -17,7 +17,16 @@ "error_id" : 1, "error_type" : "ROUTE_LONG_NAME_CONTAINS_SHORT_NAME", "line_number" : 2 - } ], + }, + { + "bad_value" : null, + "entity_id" : null, + "entity_sequence" : null, + "entity_type" : null, + "error_id" : 2, + "error_type" : "FEED_TRAVEL_TIMES_ROUNDED", + "line_number" : null + } ], "feed_version" : "1.0" } } From 0e15a54cb5950f5b3bb6020bc6d18d6ba527e02c Mon Sep 17 00:00:00 2001 From: Landon Reed Date: Tue, 18 Dec 2018 15:36:55 -0500 Subject: [PATCH 9/9] test(snapshot): fix failing error fetch snapshot (whitespace issue) --- .../graphql/GTFSGraphQLTest/canFetchErrors.json | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/src/test/resources/snapshots/com/conveyal/gtfs/graphql/GTFSGraphQLTest/canFetchErrors.json b/src/test/resources/snapshots/com/conveyal/gtfs/graphql/GTFSGraphQLTest/canFetchErrors.json index 553b04ced..274b5b3f3 100644 --- a/src/test/resources/snapshots/com/conveyal/gtfs/graphql/GTFSGraphQLTest/canFetchErrors.json +++ b/src/test/resources/snapshots/com/conveyal/gtfs/graphql/GTFSGraphQLTest/canFetchErrors.json @@ -18,13 +18,13 @@ "error_type" : "ROUTE_LONG_NAME_CONTAINS_SHORT_NAME", "line_number" : 2 }, { - "bad_value" : null, - 
"entity_id" : null, - "entity_sequence" : null, - "entity_type" : null, - "error_id" : 2, - "error_type" : "FEED_TRAVEL_TIMES_ROUNDED", - "line_number" : null + "bad_value" : null, + "entity_id" : null, + "entity_sequence" : null, + "entity_type" : null, + "error_id" : 2, + "error_type" : "FEED_TRAVEL_TIMES_ROUNDED", + "line_number" : null }, { "bad_value" : "20170916", "entity_id" : null,