Skip to content

Commit

Permalink
refactor(Fixed merge conflicts): Fixed merge conflicts within pom.xml…
Browse files Browse the repository at this point in the history
… and updated gtfs-lib to point
  • Loading branch information
Robin Beer authored and Robin Beer committed Jul 10, 2023
2 parents 96c2532 + 568d9fd commit 77243f1
Show file tree
Hide file tree
Showing 39 changed files with 1,624 additions and 357 deletions.
9 changes: 5 additions & 4 deletions .github/workflows/maven.yml
Original file line number Diff line number Diff line change
Expand Up @@ -27,10 +27,11 @@ jobs:
--health-retries 5
steps:
- uses: actions/checkout@v2
- name: Set up JDK 1.8
uses: actions/setup-java@v1
- name: Set up JDK 19
uses: actions/setup-java@v3
with:
java-version: 1.8
java-version: 19
distribution: 'temurin'
# Install node 14 for running e2e tests (and for maven-semantic-release).
- name: Use Node.js 18.x
uses: actions/setup-node@v1
Expand Down Expand Up @@ -67,7 +68,7 @@ jobs:
- name: Setup GTFS+ directory (used during testing)
run: mkdir /tmp/gtfsplus
- name: Build with Maven (run unit tests)
run: mvn --no-transfer-progress package
run: mvn --no-transfer-progress -X package
- name: Run e2e tests
if: env.SHOULD_RUN_E2E == 'true'
run: mvn test
Expand Down
2 changes: 1 addition & 1 deletion Dockerfile
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
# syntax=docker/dockerfile:1
FROM maven:3.8.6-openjdk-11
FROM maven:3.8.7-openjdk-18-slim

COPY . /datatools

Expand Down
10 changes: 8 additions & 2 deletions README.md
Original file line number Diff line number Diff line change
@@ -1,6 +1,8 @@
# Transit Data Manager

The core application for IBI Group's transit data tools suite.
[![Join the chat at https://matrix.to/#/#transit-data-tools:gitter.im](https://badges.gitter.im/repo.png)](https://matrix.to/#/#transit-data-tools:gitter.im)

The core application for IBI Group's TRANSIT-Data-Tools suite.

## Documentation

Expand All @@ -9,4 +11,8 @@ View the [latest documentation](http://conveyal-data-tools.readthedocs.org/en/la
Note: `dev` branch docs can be found [here](http://conveyal-data-tools.readthedocs.org/en/dev/).

## Docker Image
The easiest way to get `datatools-server` running is to use the provided `Dockerfile` and `docker-compose.yml`. The `docker-compose.yml` includes both database servers that are needed. Edit the supplied configurations in the `configurations` directory to ensure the server starts correctly. Once this is done running `docker-compose up` will start Datatools and all required database servers.
The easiest way to get `datatools-server` running is to use the provided `Dockerfile` and `docker-compose.yml`. The `docker-compose.yml` includes both database servers that are needed. Edit the supplied configurations in the `configurations` directory to ensure the server starts correctly. Once this is done, running `docker-compose up` will start Datatools and all required database servers.

## Getting in touch

We have a Gitter [space](https://matrix.to/#/#transit-data-tools:gitter.im) for the full TRANSIT-Data-Tools project, where you can post questions and comments.
2 changes: 2 additions & 0 deletions configurations/default/server.yml.tmp
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,8 @@ modules:
ec2:
enabled: false
default_ami: ami-your-ami-id
tag_key: a-tag-key-to-add-to-all-instances
tag_value: a-tag-value-to-add-to-all-instances
# Note: using a cloudfront URL for these download URLs will greatly
# increase download/deploy speed.
otp_download_url: https://optional-otp-repo.com
Expand Down
28 changes: 22 additions & 6 deletions pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -43,7 +43,7 @@
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<!-- Using the latest version of geotools (e.g, 20) seems to cause issues with the shapefile
plugin where the_geom for each feature is null. -->
<geotools.version>17.5</geotools.version>
<geotools.version>20.1</geotools.version>
<awsjavasdk.version>1.11.625</awsjavasdk.version>
</properties>
<build>
Expand Down Expand Up @@ -96,8 +96,8 @@
<artifactId>maven-compiler-plugin</artifactId>
<version>3.7.0</version>
<configuration>
<source>1.8</source>
<target>1.8</target>
<source>11</source>
<target>11</target>
</configuration>
</plugin>
<plugin>
Expand Down Expand Up @@ -163,6 +163,12 @@
<plugin>
<artifactId>maven-surefire-plugin</artifactId>
<version>2.22.2</version>
<configuration>
<!-- This argLine is required to get this library, designed for older Java versions, to work with newer Java releases. -->
<argLine>
--illegal-access=permit
</argLine>
</configuration>
</plugin>
</plugins>
</build>
Expand Down Expand Up @@ -217,7 +223,7 @@
<dependency>
<groupId>com.sparkjava</groupId>
<artifactId>spark-core</artifactId>
<version>2.7.2</version>
<version>2.9.4</version>
</dependency>

<!-- Logging -->
Expand Down Expand Up @@ -269,7 +275,7 @@
<dependency>
<groupId>com.github.conveyal</groupId>
<artifactId>gtfs-lib</artifactId>
<version>34a75aa</version>
<version>7826c92</version>
<!-- Exclusions added in order to silence SLF4J warnings about multiple bindings:
http://www.slf4j.org/codes.html#multiple_bindings
-->
Expand All @@ -285,7 +291,7 @@
<dependency>
<groupId>org.mongodb</groupId>
<artifactId>mongodb-driver-sync</artifactId>
<version>4.0.5</version>
<version>4.0.6</version>
</dependency>

<!-- Miscellaneous utilities -->
Expand Down Expand Up @@ -447,6 +453,16 @@
<artifactId>aws-java-sdk-sts</artifactId>
<version>${awsjavasdk.version}</version>
</dependency>
<dependency>
<groupId>com.github.MobilityData.gtfs-validator</groupId>
<artifactId>gtfs-validator-main</artifactId>
<version>4.0.0</version>
</dependency>
<dependency>
<groupId>com.google.code.gson</groupId>
<artifactId>gson</artifactId>
<version>2.8.6</version>
</dependency>
</dependencies>

</project>
Original file line number Diff line number Diff line change
Expand Up @@ -29,8 +29,10 @@
import java.io.IOException;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
Expand All @@ -41,6 +43,7 @@
import static com.conveyal.datatools.common.utils.SparkUtils.getObjectNode;
import static com.conveyal.datatools.common.utils.SparkUtils.logMessageAndHalt;
import static com.conveyal.datatools.manager.controllers.api.UserController.inTestingEnvironment;
import static org.eclipse.jetty.http.HttpStatus.OK_200;
import static spark.Spark.delete;
import static spark.Spark.options;
import static spark.Spark.patch;
Expand Down Expand Up @@ -129,6 +132,10 @@ private void registerRoutes() {
put(ROOT_ROUTE + ID_PARAM + "/stop_times", this::updateStopTimesFromPatternStops, json::write);
delete(ROOT_ROUTE + ID_PARAM + "/trips", this::deleteTripsForPattern, json::write);
}

if ("stop".equals(classToLowercase)) {
delete(ROOT_ROUTE + ID_PARAM + "/cascadeDeleteStop", this::cascadeDeleteStop, json::write);
}
}

/**
Expand Down Expand Up @@ -258,6 +265,81 @@ private String deleteTripsForPattern(Request req, Response res) {
}
}

/**
 * HTTP endpoint that removes a stop together with every reference to it. Given a string stop_id (not the
 * integer ID column that the other HTTP endpoints use), this deletes the matching stop times and pattern
 * stops, re-normalizes the stop times for each pattern that lost the stop, and finally deletes the stop
 * record itself.
 */
private String cascadeDeleteStop(Request req, Response res) {
    // A JdbcTableWriter closes its database connection after use, so a fresh writer is constructed
    // for each individual write task below.
    JdbcTableWriter writer;
    long start = System.currentTimeMillis();
    String sessionNamespace = getNamespaceAndValidateSession(req);
    String idColumn = "stop_id";

    // NOTE: this is the string stop ID, not the integer ID field used by other HTTP endpoints.
    String targetStopId = req.params("id");
    if (targetStopId == null) {
        logMessageAndHalt(req, 400, "Must provide a valid stopId.");
    }

    try (
        Connection conn = datasource.getConnection();
        PreparedStatement select = conn.prepareStatement(
            String.format("select id, stop_sequence from %s.pattern_stops where %s = ?", sessionNamespace, idColumn)
        )
    ) {
        // Record the affected patterns (and the deleted stop's sequence in each) before any rows go away.
        select.setString(1, targetStopId);
        ResultSet rows = select.executeQuery();
        Map<Integer, Integer> affectedPatterns = new HashMap<>();
        while (rows.next()) {
            affectedPatterns.put(
                rows.getInt("id"),
                rows.getInt("stop_sequence")
            );
        }

        // Step 1: remove every stop time that references the stop.
        writer = new JdbcTableWriter(Table.STOP_TIMES, datasource, sessionNamespace);
        int stopTimesDeleted = writer.deleteWhere(idColumn, targetStopId, true);

        // Step 2: remove the pattern stops, then re-normalize stop times for each affected pattern.
        int patternStopsDeleted = 0;
        if (!affectedPatterns.isEmpty()) {
            writer = new JdbcTableWriter(Table.PATTERN_STOP, datasource, sessionNamespace);
            patternStopsDeleted = writer.deleteWhere(idColumn, targetStopId, true);
            if (patternStopsDeleted > 0) {
                for (Map.Entry<Integer, Integer> entry : affectedPatterns.entrySet()) {
                    writer = new JdbcTableWriter(Table.PATTERN_STOP, datasource, sessionNamespace);
                    int deletedSequence = entry.getValue();
                    // Begin with the stop prior to the one deleted, unless it was the first stop.
                    int firstSequence = deletedSequence == 0 ? deletedSequence : deletedSequence - 1;
                    writer.normalizeStopTimesForPattern(entry.getKey(), firstSequence);
                }
            }
        }

        // Step 3: finally, remove the stop record itself.
        writer = new JdbcTableWriter(Table.STOPS, datasource, sessionNamespace);
        int stopsDeleted = writer.deleteWhere(idColumn, targetStopId, true);

        return formatJSON(
            String.format(
                "Deleted %d stop, %d pattern stops and %d stop times.",
                stopsDeleted,
                patternStopsDeleted,
                stopTimesDeleted),
            OK_200
        );
    } catch (InvalidNamespaceException e) {
        logMessageAndHalt(req, 400, "Invalid namespace.", e);
        return null;
    } catch (Exception e) {
        logMessageAndHalt(req, 500, "Error deleting entity.", e);
        return null;
    } finally {
        LOG.info("Cascade delete of stop operation took {} msec.", System.currentTimeMillis() - start);
    }
}

/**
* Currently designed to delete multiple trips in a single transaction. Trip IDs should be comma-separated in a query
* parameter. TODO: Implement this for other entity types?
Expand Down
10 changes: 5 additions & 5 deletions src/main/java/com/conveyal/datatools/editor/utils/GeoUtils.java
Original file line number Diff line number Diff line change
@@ -1,11 +1,11 @@
package com.conveyal.datatools.editor.utils;

import com.vividsolutions.jts.geom.Coordinate;
import com.vividsolutions.jts.geom.GeometryFactory;
import com.vividsolutions.jts.geom.LineString;
import com.vividsolutions.jts.geom.LinearRing;
import com.vividsolutions.jts.geom.Polygon;
import org.geotools.referencing.GeodeticCalculator;
import org.locationtech.jts.geom.Coordinate;
import org.locationtech.jts.geom.GeometryFactory;
import org.locationtech.jts.geom.LineString;
import org.locationtech.jts.geom.LinearRing;
import org.locationtech.jts.geom.Polygon;

import java.awt.geom.Point2D;

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -13,10 +13,12 @@ the License, or (at your option) any later version.

package com.conveyal.datatools.editor.utils;

import com.vividsolutions.jts.geom.Coordinate;
import com.vividsolutions.jts.geom.Geometry;
import com.vividsolutions.jts.geom.LineString;
import com.vividsolutions.jts.geom.MultiLineString;


import org.locationtech.jts.geom.Coordinate;
import org.locationtech.jts.geom.Geometry;
import org.locationtech.jts.geom.LineString;
import org.locationtech.jts.geom.MultiLineString;

import java.util.AbstractList;
import java.util.ArrayList;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@
import com.conveyal.datatools.manager.auth.Auth0UserProfile;
import com.conveyal.datatools.manager.jobs.ProcessSingleFeedJob;
import com.conveyal.datatools.manager.jobs.ValidateFeedJob;
import com.conveyal.datatools.manager.jobs.ValidateMobilityDataFeedJob;
import com.conveyal.datatools.manager.models.Deployment;
import com.conveyal.datatools.manager.models.ExternalFeedSourceProperty;
import com.conveyal.datatools.manager.models.FeedRetrievalMethod;
Expand Down Expand Up @@ -354,18 +355,17 @@ public static boolean validateAll (boolean load, boolean force, String filterFee
// If the force option is not true and the validation result did not fail, re-validate.
continue;
}
MonitorableJob job;
if (filterFeedId != null && !version.feedSourceId.equals(filterFeedId)) {
// Skip all feeds except Cortland for now.
continue;
}
Auth0UserProfile systemUser = Auth0UserProfile.createSystemUser();
if (load) {
job = new ProcessSingleFeedJob(version, systemUser, false);
JobUtils.heavyExecutor.execute(new ProcessSingleFeedJob(version, systemUser, false));
} else {
job = new ValidateFeedJob(version, systemUser, false);
JobUtils.heavyExecutor.execute(new ValidateFeedJob(version, systemUser, false));
JobUtils.heavyExecutor.execute(new ValidateMobilityDataFeedJob(version, systemUser, false));
}
JobUtils.heavyExecutor.execute(job);
}
// ValidateAllFeedsJob validateAllFeedsJob = new ValidateAllFeedsJob("system", force, load);
return true;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -308,7 +308,7 @@ private static String missingIdText(String value, String entity) {
}

/**
* Gets the text that is displayed for an option.
* Gets the displayed text for an option.
*/
static String getOptionText(String value, JsonNode specField) {
JsonNode optionNode = findOptionNode(value, specField);
Expand Down
16 changes: 13 additions & 3 deletions src/main/java/com/conveyal/datatools/manager/jobs/DeployJob.java
Original file line number Diff line number Diff line change
Expand Up @@ -993,6 +993,7 @@ private List<Instance> startEC2Instances(int count, boolean graphAlreadyBuilt) {
return Collections.EMPTY_LIST;
}
status.message = String.format("Starting up %d new instance(s) to run OTP", count);

RunInstancesRequest runInstancesRequest = new RunInstancesRequest()
.withNetworkInterfaces(interfaceSpecification)
.withInstanceType(instanceType)
Expand Down Expand Up @@ -1038,16 +1039,25 @@ private List<Instance> startEC2Instances(int count, boolean graphAlreadyBuilt) {
String serverName = String.format("%s %s (%s) %d %s", deployment.tripPlannerVersion, deployment.name, dateString, serverCounter++, graphAlreadyBuilt ? "clone" : "builder");
LOG.info("Creating tags for new EC2 instance {}", serverName);
try {
getEC2ClientForDeployJob().createTags(new CreateTagsRequest()
CreateTagsRequest createTagsRequest = new CreateTagsRequest()
.withTags(new Tag("Name", serverName))
.withTags(new Tag("projectId", deployment.projectId))
.withTags(new Tag("deploymentId", deployment.id))
.withTags(new Tag("jobId", this.jobId))
.withTags(new Tag("serverId", otpServer.id))
.withTags(new Tag("routerId", getRouterId()))
.withTags(new Tag("user", retrieveEmail()))
.withResources(instance.getInstanceId())
);
.withResources(instance.getInstanceId());

String tagKey = DataManager.getConfigPropertyAsText("modules.deployment.ec2.tag_key");
String tagValue = DataManager.getConfigPropertyAsText("modules.deployment.ec2.tag_value");

Tag customTag = new Tag();
customTag.setKey(tagKey);
customTag.setValue(tagValue);

createTagsRequest = createTagsRequest.withTags(customTag);
getEC2ClientForDeployJob().createTags(createTagsRequest);
} catch (Exception e) {
status.fail("Failed to create tags for instances.", e);
return instances;
Expand Down
Loading

0 comments on commit 77243f1

Please sign in to comment.