diff --git a/pom.xml b/pom.xml
index 6d8a6edf8..1b56d764b 100644
--- a/pom.xml
+++ b/pom.xml
@@ -272,7 +272,7 @@
             <groupId>com.github.ibi-group</groupId>
             <artifactId>gtfs-lib</artifactId>
-            <version>a3481b0660acf998c6179be89033e6d45b94583f</version>
+            <version>6739bc6f50295f930b8f0d0c2ee999066c6f4cc5</version>
diff --git a/src/main/java/com/conveyal/datatools/manager/DataManager.java b/src/main/java/com/conveyal/datatools/manager/DataManager.java
index a0aaec2dd..5e3f5e0a9 100644
--- a/src/main/java/com/conveyal/datatools/manager/DataManager.java
+++ b/src/main/java/com/conveyal/datatools/manager/DataManager.java
@@ -220,12 +220,10 @@ static void registerRoutes() throws IOException {
             new EditorControllerImpl(EDITOR_API_PREFIX, Table.FEED_INFO, DataManager.GTFS_DATA_SOURCE);
             new EditorControllerImpl(EDITOR_API_PREFIX, Table.NETWORKS, DataManager.GTFS_DATA_SOURCE);
             new EditorControllerImpl(EDITOR_API_PREFIX, Table.ROUTES, DataManager.GTFS_DATA_SOURCE);
-            new EditorControllerImpl(EDITOR_API_PREFIX, Table.ROUTE_NETWORKS, DataManager.GTFS_DATA_SOURCE);
             // NOTE: Patterns controller handles updates to nested tables shapes, pattern stops, and frequencies.
             new EditorControllerImpl(EDITOR_API_PREFIX, Table.PATTERNS, DataManager.GTFS_DATA_SOURCE);
             new EditorControllerImpl(EDITOR_API_PREFIX, Table.SCHEDULE_EXCEPTIONS, DataManager.GTFS_DATA_SOURCE);
             new EditorControllerImpl(EDITOR_API_PREFIX, Table.STOPS, DataManager.GTFS_DATA_SOURCE);
-            new EditorControllerImpl(EDITOR_API_PREFIX, Table.STOP_AREAS, DataManager.GTFS_DATA_SOURCE);
             new EditorControllerImpl(EDITOR_API_PREFIX, Table.TIME_FRAMES, DataManager.GTFS_DATA_SOURCE);
             new EditorControllerImpl(EDITOR_API_PREFIX, Table.TRANSLATIONS, DataManager.GTFS_DATA_SOURCE);
             new EditorControllerImpl(EDITOR_API_PREFIX, Table.TRIPS, DataManager.GTFS_DATA_SOURCE);
diff --git a/src/main/java/com/conveyal/datatools/manager/jobs/feedmerge/MergeLineContext.java b/src/main/java/com/conveyal/datatools/manager/jobs/feedmerge/MergeLineContext.java
index 320ba54dd..dedae2168 100644
--- a/src/main/java/com/conveyal/datatools/manager/jobs/feedmerge/MergeLineContext.java
+++ b/src/main/java/com/conveyal/datatools/manager/jobs/feedmerge/MergeLineContext.java
@@ -7,6 +7,7 @@
 import com.conveyal.gtfs.loader.Field;
 import com.conveyal.gtfs.loader.ReferenceTracker;
 import com.conveyal.gtfs.loader.Table;
+import com.conveyal.gtfs.util.CsvReaderUtil;
 import com.csvreader.CsvReader;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -131,7 +132,7 @@ public void startNewFeed(int feedIndex) throws IOException {
         keyFieldMissing = false;
         idScope = makeIdScope(version);
-        csvReader = table.getCsvReader(feed.zipFile, null);
+        csvReader = CsvReaderUtil.getCsvReaderAccordingToFileName(table, feed.zipFile, null);
         // If csv reader is null, the table was not found in the zip file. There is no need
         // to handle merging this table for this zip file.
         // No need to iterate over second (active) file if strategy is to simply extend the future GTFS
diff --git a/src/main/java/com/conveyal/datatools/manager/jobs/feedmerge/StopsMergeLineContext.java b/src/main/java/com/conveyal/datatools/manager/jobs/feedmerge/StopsMergeLineContext.java
index 4644c8c3a..c1d69f604 100644
--- a/src/main/java/com/conveyal/datatools/manager/jobs/feedmerge/StopsMergeLineContext.java
+++ b/src/main/java/com/conveyal/datatools/manager/jobs/feedmerge/StopsMergeLineContext.java
@@ -4,6 +4,7 @@
 import com.conveyal.gtfs.error.NewGTFSError;
 import com.conveyal.gtfs.loader.Field;
 import com.conveyal.gtfs.loader.Table;
+import com.conveyal.gtfs.util.CsvReaderUtil;
 import com.csvreader.CsvReader;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -74,7 +75,7 @@ private void checkThatStopCodesArePopulatedWhereRequired() throws IOException {
         int stopCodeIndex = getFieldIndex("stop_code");
         // Get special stops reader to iterate over every stop and determine if stop_code values
         // are present.
-        CsvReader stopsReader = table.getCsvReader(feed.zipFile, null);
+        CsvReader stopsReader = CsvReaderUtil.getCsvReaderAccordingToFileName(table, feed.zipFile, null);
         while (stopsReader.readRecord()) {
             stopsCount++;
             // Special stop records (i.e., a station, entrance, or anything with
diff --git a/src/main/java/com/conveyal/datatools/manager/models/transform/NormalizeFieldTransformation.java b/src/main/java/com/conveyal/datatools/manager/models/transform/NormalizeFieldTransformation.java
index 92b1ac77a..fd3bb6266 100644
--- a/src/main/java/com/conveyal/datatools/manager/models/transform/NormalizeFieldTransformation.java
+++ b/src/main/java/com/conveyal/datatools/manager/models/transform/NormalizeFieldTransformation.java
@@ -6,6 +6,7 @@
 import com.conveyal.datatools.manager.utils.json.JsonUtil;
 import com.conveyal.gtfs.loader.Field;
 import com.conveyal.gtfs.loader.Table;
+import com.conveyal.gtfs.util.CsvReaderUtil;
 import com.csvreader.CsvReader;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.commons.text.WordUtils;
@@ -199,7 +200,11 @@ public void transform(FeedTransformZipTarget zipTarget, MonitorableJob.Status st
             status.fail(String.format("Unsupported GTFS file '%s'", tableName));
             return;
         }
-        CsvReader csvReader = gtfsTable.getCsvReader(new ZipFile(tempZipPath.toAbsolutePath().toString()), null);
+        CsvReader csvReader = CsvReaderUtil.getCsvReaderAccordingToFileName(
+            gtfsTable,
+            new ZipFile(tempZipPath.toAbsolutePath().toString()),
+            null
+        );
         if (csvReader == null) {
             status.fail(String.format("'Normalize Field' failed because file '%s' was not found in the GTFS archive", tableName));
             return;
diff --git a/src/main/java/com/conveyal/datatools/manager/models/transform/RemoveNonRevenueTripsTransformation.java b/src/main/java/com/conveyal/datatools/manager/models/transform/RemoveNonRevenueTripsTransformation.java
index 065646512..a4269860d 100644
--- a/src/main/java/com/conveyal/datatools/manager/models/transform/RemoveNonRevenueTripsTransformation.java
+++ b/src/main/java/com/conveyal/datatools/manager/models/transform/RemoveNonRevenueTripsTransformation.java
@@ -5,6 +5,7 @@
 import com.conveyal.datatools.manager.utils.GtfsUtils;
 import com.conveyal.gtfs.loader.Field;
 import com.conveyal.gtfs.loader.Table;
+import com.conveyal.gtfs.util.CsvReaderUtil;
 import com.csvreader.CsvReader;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -58,7 +59,11 @@ public void transform(FeedTransformZipTarget zipTarget, MonitorableJob.Status st
             Files.copy(originalZipPath,
                 tempZipPath, StandardCopyOption.REPLACE_EXISTING);
             Table gtfsTable = GtfsUtils.getGtfsTable("stop_times");
-            CsvReader csvReaderForStopTimes = gtfsTable.getCsvReader(new ZipFile(tempZipPath.toAbsolutePath().toString()), null);
+            CsvReader csvReaderForStopTimes = CsvReaderUtil.getCsvReaderAccordingToFileName(
+                gtfsTable,
+                new ZipFile(tempZipPath.toAbsolutePath().toString()),
+                null
+            );
             final String[] headersForStopTime = csvReaderForStopTimes.getHeaders();
             Field[] fieldsFoundInStopTimes = gtfsTable.getFieldsFromFieldHeaders(headersForStopTime, null);
             Map<String, Integer> fieldIndexes = getFieldIndexes(fieldsFoundInStopTimes);
@@ -73,7 +78,11 @@ public void transform(FeedTransformZipTarget zipTarget, MonitorableJob.Status st
             );
             gtfsTable = GtfsUtils.getGtfsTable("trips");
-            CsvReader csvReaderForTrips = gtfsTable.getCsvReader(new ZipFile(tempZipPath.toAbsolutePath().toString()), null);
+            CsvReader csvReaderForTrips = CsvReaderUtil.getCsvReaderAccordingToFileName(
+                gtfsTable,
+                new ZipFile(tempZipPath.toAbsolutePath().toString()),
+                null
+            );
             final String[] headersForTrips = csvReaderForTrips.getHeaders();
             Field[] fieldsFoundInStopTrips = gtfsTable.getFieldsFromFieldHeaders(headersForTrips, null);
             int tripIdFieldIndex = getFieldIndex(fieldsFoundInStopTrips, TRIP_ID_FIELD_NAME);
diff --git a/src/main/java/com/conveyal/datatools/manager/utils/MergeFeedUtils.java b/src/main/java/com/conveyal/datatools/manager/utils/MergeFeedUtils.java
index 0205ef003..606c39a4d 100644
--- a/src/main/java/com/conveyal/datatools/manager/utils/MergeFeedUtils.java
+++ b/src/main/java/com/conveyal/datatools/manager/utils/MergeFeedUtils.java
@@ -15,6 +15,7 @@
 import com.conveyal.gtfs.loader.Field;
 import com.conveyal.gtfs.loader.Table;
 import com.conveyal.gtfs.model.StopTime;
+import com.conveyal.gtfs.util.CsvReaderUtil;
 import com.csvreader.CsvReader;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -45,7 +46,7 @@ public class MergeFeedUtils {
     public static Set<String> getIdsForTable(ZipFile zipFile, Table table) throws IOException {
         Set<String> ids = new HashSet<>();
         String keyField = table.getKeyFieldName();
-        CsvReader csvReader = table.getCsvReader(zipFile, null);
+        CsvReader csvReader = CsvReaderUtil.getCsvReaderAccordingToFileName(table, zipFile, null);
         if (csvReader == null) {
             LOG.warn("Table {} not found in zip file: {}", table.name, zipFile.getName());
             return ids;
@@ -117,7 +118,7 @@ public static Set<Field> getAllFields(List<FeedToMerge> feedsToMerge, Table tabl
         Set<Field> sharedFields = new HashSet<>();
         // First, iterate over each feed to collect the shared fields that need to be output in the merged table.
         for (FeedToMerge feed : feedsToMerge) {
-            CsvReader csvReader = table.getCsvReader(feed.zipFile, null);
+            CsvReader csvReader = CsvReaderUtil.getCsvReaderAccordingToFileName(table, feed.zipFile, null);
             // If csv reader is null, the table was not found in the zip file.
             if (csvReader == null) {
                 continue;
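
Note on the recurring change: every call site above swaps the instance method Table#getCsvReader for the static CsvReaderUtil.getCsvReaderAccordingToFileName, presumably provided by the gtfs-lib bump at the top of the diff, with the same arguments (table, zip file, and a null error storage). The sketch below shows that call pattern in isolation, assuming the helper keeps getCsvReader's contract as implied by the surrounding code: headers are already read when the reader is returned (the transformations call getHeaders() immediately), and null is returned when the table's file is absent from the zip. The class and method names in the sketch are illustrative only and not part of this PR.

import com.conveyal.gtfs.loader.Table;
import com.conveyal.gtfs.util.CsvReaderUtil;
import com.csvreader.CsvReader;

import java.io.IOException;
import java.util.HashSet;
import java.util.Set;
import java.util.zip.ZipFile;

public class CsvReaderUtilSketch {
    /**
     * Collects the values of a table's key field (e.g., stop_id for stops) from a GTFS zip,
     * mirroring the pattern used in MergeFeedUtils#getIdsForTable above.
     */
    public static Set<String> readKeyFieldValues(ZipFile zipFile, Table table) throws IOException {
        Set<String> ids = new HashSet<>();
        // As at every call site in this PR, the error storage argument is null.
        CsvReader csvReader = CsvReaderUtil.getCsvReaderAccordingToFileName(table, zipFile, null);
        if (csvReader == null) {
            // Assumed contract (matching Table#getCsvReader): a null reader means the
            // table's file is not present in the zip, so there is nothing to read.
            return ids;
        }
        // Headers are assumed to be read already, so the key field can be located by name.
        int keyFieldIndex = csvReader.getIndex(table.getKeyFieldName());
        if (keyFieldIndex >= 0) {
            while (csvReader.readRecord()) {
                ids.add(csvReader.get(keyFieldIndex));
            }
        }
        csvReader.close();
        return ids;
    }
}

The null check matters because the helper, like getCsvReader before it, signals a missing table file by returning null rather than throwing, as the comments in MergeLineContext and MergeFeedUtils above already note.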