Skip to content

Commit

Permalink
refactor: update to make use of the new CsvReaderUtil
Browse files Browse the repository at this point in the history
  • Loading branch information
br648 committed Nov 13, 2024
1 parent a7c4121 commit 1e6769a
Show file tree
Hide file tree
Showing 5 changed files with 14 additions and 6 deletions.
2 changes: 1 addition & 1 deletion pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -271,7 +271,7 @@
<dependency>
<groupId>com.github.ibi-group</groupId>
<artifactId>gtfs-lib</artifactId>
<version>c9908447dad0720cd606c3c753752e6850f7b572</version>
<version>14f98acdf4a6bc0a496d7e808531b605a9c1d8cf</version>
<!-- Latest dev-flex build on jitpack.io -->
<!-- Exclusions added in order to silence SLF4J warnings about multiple bindings:
http://www.slf4j.org/codes.html#multiple_bindings
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@
import com.conveyal.gtfs.loader.Field;
import com.conveyal.gtfs.loader.ReferenceTracker;
import com.conveyal.gtfs.loader.Table;
import com.conveyal.gtfs.util.CsvReaderUtil;
import com.csvreader.CsvReader;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
Expand Down Expand Up @@ -131,7 +132,7 @@ public void startNewFeed(int feedIndex) throws IOException {
keyFieldMissing = false;

idScope = makeIdScope(version);
csvReader = table.getCsvReader(feed.zipFile, null);
csvReader = CsvReaderUtil.getCsvReaderAccordingToFileName(table, feed.zipFile, null);
// If csv reader is null, the table was not found in the zip file. There is no need
// to handle merging this table for this zip file.
// No need to iterate over second (active) file if strategy is to simply extend the future GTFS
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@
import com.conveyal.gtfs.error.NewGTFSError;
import com.conveyal.gtfs.loader.Field;
import com.conveyal.gtfs.loader.Table;
import com.conveyal.gtfs.util.CsvReaderUtil;
import com.csvreader.CsvReader;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
Expand Down Expand Up @@ -74,7 +75,7 @@ private void checkThatStopCodesArePopulatedWhereRequired() throws IOException {
int stopCodeIndex = getFieldIndex("stop_code");
// Get special stops reader to iterate over every stop and determine if stop_code values
// are present.
CsvReader stopsReader = table.getCsvReader(feed.zipFile, null);
CsvReader stopsReader = CsvReaderUtil.getCsvReaderAccordingToFileName(table, feed.zipFile, null);
while (stopsReader.readRecord()) {
stopsCount++;
// Special stop records (i.e., a station, entrance, or anything with
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@
import com.conveyal.datatools.manager.utils.json.JsonUtil;
import com.conveyal.gtfs.loader.Field;
import com.conveyal.gtfs.loader.Table;
import com.conveyal.gtfs.util.CsvReaderUtil;
import com.csvreader.CsvReader;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.text.WordUtils;
Expand Down Expand Up @@ -198,7 +199,11 @@ public void transform(FeedTransformZipTarget zipTarget, MonitorableJob.Status st
Files.copy(originalZipPath, tempZipPath, StandardCopyOption.REPLACE_EXISTING);

Table gtfsTable = GtfsUtils.getGtfsTable(table);
CsvReader csvReader = gtfsTable.getCsvReader(new ZipFile(tempZipPath.toAbsolutePath().toString()), null);
CsvReader csvReader = CsvReaderUtil.getCsvReaderAccordingToFileName(
gtfsTable,
new ZipFile(tempZipPath.toAbsolutePath().toString()),
null
);
final String[] headers = csvReader.getHeaders();
Field[] fieldsFoundInZip = gtfsTable.getFieldsFromFieldHeaders(headers, null);
int transformFieldIndex = getFieldIndex(fieldsFoundInZip, fieldName);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@
import com.conveyal.gtfs.loader.Field;
import com.conveyal.gtfs.loader.Table;
import com.conveyal.gtfs.model.StopTime;
import com.conveyal.gtfs.util.CsvReaderUtil;
import com.csvreader.CsvReader;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
Expand Down Expand Up @@ -45,7 +46,7 @@ public class MergeFeedUtils {
public static Set<String> getIdsForTable(ZipFile zipFile, Table table) throws IOException {
Set<String> ids = new HashSet<>();
String keyField = table.getKeyFieldName();
CsvReader csvReader = table.getCsvReader(zipFile, null);
CsvReader csvReader = CsvReaderUtil.getCsvReaderAccordingToFileName(table, zipFile, null);
if (csvReader == null) {
LOG.warn("Table {} not found in zip file: {}", table.name, zipFile.getName());
return ids;
Expand Down Expand Up @@ -117,7 +118,7 @@ public static Set<Field> getAllFields(List<FeedToMerge> feedsToMerge, Table tabl
Set<Field> sharedFields = new HashSet<>();
// First, iterate over each feed to collect the shared fields that need to be output in the merged table.
for (FeedToMerge feed : feedsToMerge) {
CsvReader csvReader = table.getCsvReader(feed.zipFile, null);
CsvReader csvReader = CsvReaderUtil.getCsvReaderAccordingToFileName(table, feed.zipFile, null);
// If csv reader is null, the table was not found in the zip file.
if (csvReader == null) {
continue;
Expand Down

0 comments on commit 1e6769a

Please sign in to comment.