diff --git a/.github/workflows/maven.yml b/.github/workflows/maven.yml
new file mode 100644
index 000000000..26a711601
--- /dev/null
+++ b/.github/workflows/maven.yml
@@ -0,0 +1,164 @@
+name: Java CI
+
+on: [push, pull_request]
+
+jobs:
+ build:
+
+ runs-on: ubuntu-latest
+ services:
+ postgres:
+ image: postgres:10.8
+ # Set postgres env variables according to test env.yml config
+ env:
+ POSTGRES_USER: postgres
+ POSTGRES_PASSWORD: postgres
+ POSTGRES_DB: catalogue
+ ports:
+ - 5432:5432
+ # Set health checks to wait until postgres has started
+ options: >-
+ --health-cmd pg_isready
+ --health-interval 10s
+ --health-timeout 5s
+ --health-retries 5
+ steps:
+ - uses: actions/checkout@v2
+ - name: Set up JDK 1.8
+ uses: actions/setup-java@v1
+ with:
+ java-version: 1.8
+ # Install node 12 for running e2e tests (and for maven-semantic-release).
+ - name: Use Node.js 12.x
+ uses: actions/setup-node@v1
+ with:
+ node-version: 12.x
+ - name: Start MongoDB
+ uses: supercharge/mongodb-github-action@1.3.0
+ with:
+ mongodb-version: 4.2
+ - name: Setup Maven Cache
+ uses: actions/cache@v2
+ id: cache
+ with:
+ path: ~/.m2
+ key: maven-local-repo
+ - name: Inject slug/short variables # so that we can reference $GITHUB_HEAD_REF_SLUG for branch name
+ uses: rlespinasse/github-slug-action@v3.x
+ - name: Install maven-semantic-release
+ # FIXME: Enable cache for node packages (add package.json?)
+ run: |
+ yarn global add @conveyal/maven-semantic-release semantic-release
+ # Add yarn's global bin directory to GITHUB_PATH so that globally installed packages are executable in later steps.
+ echo "$(yarn global bin)" >> $GITHUB_PATH
+ # Run a script to check whether the e2e tests should be run. The script sets the environment variable SHOULD_RUN_E2E,
+ # which is used in later CI commands.
+ - name: Check if end-to-end tests should run
+ run: ./scripts/check-if-e2e-tests-should-run-on-ci.sh
+ - name: Add profile credentials to ~/.aws/credentials
+ run: ./scripts/add-aws-credentials.sh
+ env:
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_REGION: ${{ secrets.AWS_REGION }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ - name: Setup GTFS+ directory (used during testing)
+ run: mkdir /tmp/gtfsplus
+ - name: Build with Maven (run unit tests)
+ run: mvn --no-transfer-progress package
+ - name: Restart MongoDB with fresh database (for e2e tests)
+ run: ./scripts/restart-mongo-with-fresh-db.sh
+ - name: Copy unit test coverage results into another folder # so the e2e tests don't overwrite them
+ run: cp -R target target-unit-test-results
+ - name: Run e2e tests
+ if: env.SHOULD_RUN_E2E == 'true'
+ run: mvn test
+ env:
+ AUTH0_API_CLIENT: ${{ secrets.AUTH0_API_CLIENT }}
+ AUTH0_API_SECRET: ${{ secrets.AUTH0_API_SECRET }}
+ AUTH0_CLIENT_ID: ${{ secrets.AUTH0_CLIENT_ID }}
+ AUTH0_DOMAIN: ${{ secrets.AUTH0_DOMAIN }}
+ AUTH0_SECRET: ${{ secrets.AUTH0_SECRET }}
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_REGION: ${{ secrets.AWS_REGION }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ E2E_AUTH0_PASSWORD: ${{ secrets.E2E_AUTH0_PASSWORD }}
+ E2E_AUTH0_USERNAME: ${{ secrets.E2E_AUTH0_USERNAME }}
+ GRAPH_HOPPER_KEY: ${{ secrets.GRAPH_HOPPER_KEY }}
+ GTFS_DATABASE_PASSWORD: ${{ secrets.GTFS_DATABASE_PASSWORD }}
+ GTFS_DATABASE_URL: ${{ secrets.GTFS_DATABASE_URL }}
+ GTFS_DATABASE_USER: ${{ secrets.GTFS_DATABASE_USER }}
+ MAPBOX_ACCESS_TOKEN: ${{ secrets.MAPBOX_ACCESS_TOKEN }}
+ MONGO_DB_NAME: ${{ secrets.MONGO_DB_NAME }}
+ OSM_VEX: ${{ secrets.OSM_VEX }}
+ RUN_E2E: "true"
+ S3_BUCKET: ${{ secrets.S3_BUCKET }}
+ SPARKPOST_EMAIL: ${{ secrets.SPARKPOST_EMAIL }}
+ SPARKPOST_KEY: ${{ secrets.SPARKPOST_KEY }}
+ TRANSITFEEDS_KEY: ${{ secrets.TRANSITFEEDS_KEY }}
+ - name: Copy e2e coverage results into another folder # so the deployment results don't overwrite them
+ run: if [ "$SHOULD_RUN_E2E" = "true" ]; then cp -R target target-e2e-test-results; fi
+ # These first codecov runs upload reports associated with the commit that triggered the CI build (set through CI environment variables).
+ # Use codecov script flags to upload the coverage report for the unit tests.
+ - name: Upload codecov for unit tests
+ run: bash <(curl -s https://codecov.io/bash) -s target-unit-test-results -F unit_tests
+ - name: Upload the coverage report for the e2e tests
+ run: |
+ if [ "$SHOULD_RUN_E2E" = "true" ]; then
+ bash <(curl -s https://codecov.io/bash) -s target-e2e-test-results -F end_to_end_tests;
+ fi
+
+ # Run maven-semantic-release to potentially create a new release of datatools-server. The --skip-maven-deploy flag
+ # avoids deploying to Maven Central, so essentially this just creates a release with a changelog on GitHub.
+ #
+ # If maven-semantic-release finishes successfully and the current branch is master, upload coverage reports for the
+ # commits that maven-semantic-release generated. Since the codecov runs above are associated with the commit that
+ # initiated the CI build, their reports will not be associated with the commits that maven-semantic-release created
+ # (if it ended up creating a release, that workflow produces two additional commits). Therefore, on master,
+ # codecov needs to be run two more times to create reports for the commits made by maven-semantic-release.
+ # See https://github.com/conveyal/gtfs-lib/issues/193. To create reports for both the unit and e2e tests,
+ # the codecov script must be run twice.
+ #
+ # The git commands get the commit hashes of the HEAD commit and the commit just before HEAD.
+ - name: Run maven-semantic-release
+ env:
+ GH_TOKEN: ${{ secrets.GH_TOKEN }}
+ run: |
+ semantic-release --prepare @conveyal/maven-semantic-release --publish @semantic-release/github,@conveyal/maven-semantic-release --verify-conditions @semantic-release/github,@conveyal/maven-semantic-release --verify-release @conveyal/maven-semantic-release --use-conveyal-workflow --dev-branch=dev --skip-maven-deploy
+ if [[ "$GITHUB_REF_SLUG" = "master" ]]; then
+ bash <(curl -s https://codecov.io/bash) -C "$(git rev-parse HEAD)" -s target-unit-test-results -F unit_tests
+ bash <(curl -s https://codecov.io/bash) -C "$(git rev-parse HEAD^)" -s target-unit-test-results -F unit_tests
+ if [ "$SHOULD_RUN_E2E" = "true" ]; then
+ bash <(curl -s https://codecov.io/bash) -C "$(git rev-parse HEAD)" -s target-e2e-test-results -F end_to_end_tests;
+ bash <(curl -s https://codecov.io/bash) -C "$(git rev-parse HEAD^)" -s target-e2e-test-results -F end_to_end_tests;
+ fi
+ fi
+
+
+ - name: Prepare deploy artifacts
+ run: |
+ # Get the name of the current branch for use in the jar name.
+ export BRANCH=$GITHUB_HEAD_REF_SLUG
+ # Replace forward slashes with underscores in branch name.
+ export BRANCH_CLEAN=${BRANCH//\//_}
+ # Create directory that will contain artifacts to deploy to s3.
+ mkdir deploy
+ # Display contents of target directory (for logging purposes only).
+ ls target/*.jar
+ # Copy packaged jar over to deploy dir.
+ cp target/dt-*.jar deploy/
+ # Get the first jar file and copy it into a new file that adds the current branch name. During a
+ # merge to master, there are multiple jar files produced, but they're each effectively the same
+ # code (there may be slight differences in the version shown in the `pom.xml`, but that's not
+ # important for the purposes of creating this "latest branch" jar).
+ ALL_JARS=(target/dt-*.jar)
+ FIRST_JAR="${ALL_JARS[0]}"
+ cp "$FIRST_JAR" "deploy/dt-latest-$BRANCH_CLEAN.jar"
+ - name: Deploy to S3
+ uses: jakejarvis/s3-sync-action@master
+ with:
+ args: --acl public-read
+ env:
+ AWS_S3_BUCKET: datatools-builds
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ SOURCE_DIR: 'deploy'
\ No newline at end of file
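The workflow relies on scripts/check-if-e2e-tests-should-run-on-ci.sh exporting SHOULD_RUN_E2E so that later steps can read it, both in `if:` conditions and in shell checks. The script itself is not part of this diff; a minimal sketch of the mechanism, assuming the decision is based on the branch slug, could look like:

    #!/usr/bin/env bash
    # Hypothetical sketch of scripts/check-if-e2e-tests-should-run-on-ci.sh.
    # Anything appended to $GITHUB_ENV becomes an environment variable for all
    # subsequent steps in the job, which is how SHOULD_RUN_E2E reaches the
    # later mvn test and codecov steps.
    if [[ "$GITHUB_REF_SLUG" == "master" || "$GITHUB_REF_SLUG" == "dev" ]]; then
      echo "SHOULD_RUN_E2E=true" >> "$GITHUB_ENV"
    else
      echo "SHOULD_RUN_E2E=false" >> "$GITHUB_ENV"
    fi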
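Similarly, scripts/add-aws-credentials.sh is expected to turn the AWS_* secrets into files that the AWS SDK can pick up during the e2e tests. Its contents are not shown in this diff; judging from the equivalent Travis step below, it presumably writes something along these lines:

    #!/usr/bin/env bash
    # Hypothetical sketch of scripts/add-aws-credentials.sh: write a default
    # AWS profile from the environment variables provided by the workflow.
    mkdir -p ~/.aws
    printf '%s\n' '[default]' \
      "aws_access_key_id=${AWS_ACCESS_KEY_ID}" \
      "aws_secret_access_key=${AWS_SECRET_ACCESS_KEY}" > ~/.aws/credentials
    printf '%s\n' '[default]' "region=${AWS_REGION:-us-east-1}" > ~/.aws/config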
diff --git a/.travis.yml b/.travis.yml
deleted file mode 100644
index 214f26b71..000000000
--- a/.travis.yml
+++ /dev/null
@@ -1,110 +0,0 @@
-dist: trusty # jdk 8 not available on xenial
-language: java
-java:
- - oraclejdk8
-sudo: false
-# Install mongoDB to perform persistence tests
-services:
- - mongodb
- - postgresql
-addons:
- postgresql: 9.6
-cache:
- directories:
- - $HOME/.m2
- - $HOME/.cache/yarn
-before_install:
- #- sed -i.bak -e 's|https://nexus.codehaus.org/snapshots/|https://oss.sonatype.org/content/repositories/codehaus-snapshots/|g' ~/.m2/settings.xml
- # run a script to see if the e2e tests should be ran. This script will set the environment variable SHOULD_RUN_E2E
- # which is used in later travis commands.
- # FIXME: E2E is disabled because it has broken for months. PR to fix e2e should uncomment this line.
- # - source ./scripts/check-if-e2e-tests-should-run-on-travis.sh
- # set region in AWS config for S3 setup
- # - mkdir ~/.aws && printf '%s\n' '[default]' 'aws_access_key_id=foo' 'aws_secret_access_key=bar' 'region=us-east-1' > ~/.aws/config
- # add aws credentials for datatools-server
- - if [ "$SHOULD_RUN_E2E" = "true" ]; then mkdir ~/.aws && printf '%s\n' '[default]' 'aws_access_key_id=${AWS_ACCESS_KEY_ID}' 'aws_secret_access_key=${AWS_SECRET_ACCESS_KEY}' 'region=us-east-1' > ~/.aws/config; else mkdir ~/.aws && printf '%s\n' '[default]' 'aws_access_key_id=foo' 'aws_secret_access_key=bar' 'region=us-east-1' > ~/.aws/config; fi
- - cp configurations/default/server.yml.tmp configurations/default/server.yml
- # create database for e2e (and unit) tests
- - psql -U postgres -c 'CREATE DATABASE catalogue;'
- # install node v12 here in order to run the e2e tests (and for maven-semantic-release).
- - nvm install '12'
-# Skip the install step (mvn install).
-install: true
-# Install semantic-release
-before_script:
- - yarn global add @conveyal/maven-semantic-release semantic-release@15
- # Create dir for GTFS+ files (used during testing)
- - mkdir /tmp/gtfsplus
-script:
- # run mvn package (only print errors) to make sure unit tests and packaging can work
- - mvn -q package
- # Restart/clear MongoDB so that E2E tests run on clean DB.
- - ./scripts/restart-mongo-with-fresh-db.sh
- # recursively copy coverage results into another folder so the e2e tests don't overwrite them
- - cp -R target target-unit-test-results
- # run just the e2e tests
- - if [ "$SHOULD_RUN_E2E" = "true" ]; then RUN_E2E=true mvn test; fi
- # recursively copy coverage results into another folder so the deployment results don't overwrite them
- - if [ "$SHOULD_RUN_E2E" = "true" ]; then cp -R target target-e2e-test-results; fi
-after_success:
- # these first codecov runs will upload a report associated with the commit set through Travis CI environment variables
- # use codecov script flags to upload the coverage report for the unit tests
- - bash <(curl -s https://codecov.io/bash) -s target-unit-test-results -F unit_tests
- # use codecov script flags to upload the coverage report for the e2e tests
- - |
- if [ "$SHOULD_RUN_E2E" = "true" ]; then
- bash <(curl -s https://codecov.io/bash) -s target-e2e-test-results -F end_to_end_tests;
- fi
-
- # run maven-semantic-release to potentially create a new release of datatools-server. The flag --skip-maven-deploy is
- # used to avoid deploying to maven central. So essentially, this just creates a release with a changelog on github.
- #
- # If maven-semantic-release finishes successfully and the current branch is master, upload coverage reports for the
- # commits that maven-semantic-release generated. Since the above codecov run is associated with the commit that
- # initiated the Travis build, the report will not be associated with the commits that maven-semantic-release performed
- # (if it ended up creating a release and the two commits that were a part of that workflow). Therefore, if on master
- # codecov needs to be ran two more times to create codecov reports for the commits made by maven-semantic-release.
- # See https://github.com/conveyal/gtfs-lib/issues/193. In order to create reports for both the unit and e2e tsts,
- # the codecov scripts must be ran twice.
- #
- # The git commands get the commit hash of the HEAD commit and the commit just before HEAD.
- - |
- semantic-release --prepare @conveyal/maven-semantic-release --publish @semantic-release/github,@conveyal/maven-semantic-release --verify-conditions @semantic-release/github,@conveyal/maven-semantic-release --verify-release @conveyal/maven-semantic-release --use-conveyal-workflow --dev-branch=dev --skip-maven-deploy
- if [[ "$TRAVIS_BRANCH" = "master" ]]; then
- bash <(curl -s https://codecov.io/bash) -C "$(git rev-parse HEAD)" -s target-unit-test-results -F unit_tests
- bash <(curl -s https://codecov.io/bash) -C "$(git rev-parse HEAD^)" -s target-unit-test-results -F unit_tests
- if [ "$SHOULD_RUN_E2E" = "true" ]; then
- bash <(curl -s https://codecov.io/bash) -C "$(git rev-parse HEAD)" -s target-e2e-test-results -F end_to_end_tests;
- bash <(curl -s https://codecov.io/bash) -C "$(git rev-parse HEAD^)" -s target-e2e-test-results -F end_to_end_tests;
- fi
- fi
-before_deploy:
-# get branch name of current branch for use in jar name: https://graysonkoonce.com/getting-the-current-branch-name-during-a-pull-request-in-travis-ci/
-- export BRANCH=$(if [ "$TRAVIS_PULL_REQUEST" == "false" ]; then echo $TRAVIS_BRANCH; else echo $TRAVIS_PULL_REQUEST_BRANCH; fi)
- # Replace forward slashes with underscores in branch name.
-- export BRANCH_CLEAN=${BRANCH//\//_}
- # Create directory that will contain artifacts to deploy to s3.
-- mkdir deploy
- # Display contents of target directory (for logging purposes only).
-- ls target/*.jar
- # Copy packaged jar over to deploy dir.
-- cp target/dt-*.jar deploy/
- # Get the first jar file and copy it into a new file that adds the current branch name. During a
- # merge to master, there are multiple jar files produced, but they're each effectively the same
- # code (there may be slight differences in the version shown in the `pom.xml`, but that's not
- # important for the purposes of creating this "latest branch" jar).
-- ALL_JARS=(target/dt-*.jar)
-- FIRST_JAR="${ALL_JARS[0]}"
-- cp "$FIRST_JAR" "deploy/dt-latest-$BRANCH_CLEAN.jar"
-deploy:
- provider: s3
- skip_cleanup: true
- access_key_id: AKIA2YG5XQ2YEJZCBR7J
- secret_access_key:
- secure: eLu2cZaG2stcSJe4ZLKq+q7FaQa1fKcPsbxeMl2fda3lcLBUN+6OoFH5dJzz/M5I2GFn/NOAt5wPmljOZf4ni01dqDimdI6qyVLyWbeOFcl6kbawVRcVpHByUiqkrj3vZ3VzGVkfud3OPdLE264xEMax/YBTK/lUA1n4X1EKW7JijaCy5RtBrej4jGXH4XnP5aJrNtsDieLkpnTcq6wfQ0CfcNvcTwz2/XvUD5Li7AJo7/r7ueyA4GUQBqinkAHIoqCv94Tx+NPhcUTywdIGaUmDYR5/uiHubFFyV5dffpkmxYStXS+VU9aJYQuF6w8YmU+RvD86dVSDw0w3eSUsM72hgfANe5Dq0XsCcLEeq+e9U8Xsb3cWcC6dYLwT2lTbufRSDhj1C/NNFZFoXW7hEMujbyKpFYulccoZ7zxHHnD76BZarvRSRofzMQSITuyghlRvbDyAehK8x8qf5MKvXJ52DSlxOkgfN3K45jMtCndpRXPeGh2rwwt5vGpC3Mdsapmxwd3D7YHrIgZtRr+DmmmMIm76UdJTmzozYm6JA8ocldYsnnHkEYu8Ailb+R06BF1+p7DdW8NiG8+hsaXGnKYI8wo6cCibY6KPMRKffFTLQGmitqIGhGUk79xhtTiGax1bDZgb/iHwE6lSr85lSHqjALLDLjWkxOYb+GIj1Qo=
- bucket: datatools-builds
- local-dir: deploy
- acl: public_read
- on:
- repo: ibi-group/datatools-server
- all_branches: true
diff --git a/configurations/default/server.yml.tmp b/configurations/default/server.yml.tmp
index 760ed9cd8..6d78c25cc 100644
--- a/configurations/default/server.yml.tmp
+++ b/configurations/default/server.yml.tmp
@@ -27,7 +27,6 @@ modules:
# Note: using a cloudfront URL for these download URLs will greatly
# increase download/deploy speed.
otp_download_url: https://optional-otp-repo.com
- r5_download_url: https://optional-r5-repo.com
user_admin:
enabled: true
gtfsapi:
diff --git a/configurations/test/env.yml.tmp b/configurations/test/env.yml.tmp
index bf1cbe686..ee8a12604 100644
--- a/configurations/test/env.yml.tmp
+++ b/configurations/test/env.yml.tmp
@@ -13,8 +13,8 @@ OSM_VEX: http://localhost:1000
SPARKPOST_KEY: your-sparkpost-key
SPARKPOST_EMAIL: email@example.com
GTFS_DATABASE_URL: jdbc:postgresql://localhost/catalogue
-# GTFS_DATABASE_USER:
-# GTFS_DATABASE_PASSWORD:
+GTFS_DATABASE_USER: postgres
+GTFS_DATABASE_PASSWORD: postgres
# To configure a remote MongoDB service (such as MongoDB Atlas), provide all
# Mongo properties below. Otherwise, only a database name is needed (server
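With these defaults the test configuration lines up with the postgres service container defined in the new workflow (user postgres, password postgres, database catalogue on localhost:5432). When reproducing the CI environment locally, connectivity with exactly these settings can be checked with, for example:

    # Connect once with the same credentials the tests will use.
    PGPASSWORD=postgres psql -h localhost -p 5432 -U postgres -d catalogue -c 'SELECT 1;'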
diff --git a/pom.xml b/pom.xml
index cf946cd30..b4495f7f6 100644
--- a/pom.xml
+++ b/pom.xml
@@ -6,7 +6,7 @@
  <groupId>com.conveyal</groupId>
  <artifactId>datatools-server</artifactId>
- <version>3.8.0</version>
+ <version>3.9.0</version>
@@ -40,6 +40,7 @@
2.10.1
+ UTF-8
17.5
@@ -102,7 +103,7 @@
  <groupId>pl.project13.maven</groupId>
  <artifactId>git-commit-id-plugin</artifactId>
- <version>2.2.1</version>
+ <version>3.0.1</version>
@@ -117,6 +118,13 @@
-->
true
true
+
+
+ false
+ true
+
@@ -248,9 +256,9 @@
- AWS S3 SDK - putting/getting objects into/out of S3.
-->
- <groupId>com.conveyal</groupId>
+ <groupId>com.github.conveyal</groupId>
  <artifactId>gtfs-lib</artifactId>
- <version>6.1.0</version>
+ <version>6.2.2</version>
@@ -273,7 +281,7 @@
  <groupId>com.google.guava</groupId>
  <artifactId>guava</artifactId>
- <version>18.0</version>
+ <version>30.0-jre</version>
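The gtfs-lib groupId change from com.conveyal to com.github.conveyal suggests the artifact is now resolved through JitPack rather than Sonatype/Maven Central. If the pom does not already declare a JitPack repository, a repositories entry along these lines would also be needed (a sketch; not shown in this diff):

    <repositories>
      <!-- JitPack builds GitHub projects on demand and serves them as Maven artifacts. -->
      <repository>
        <id>jitpack.io</id>
        <url>https://jitpack.io</url>
      </repository>
    </repositories>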