GitHub Actions for Staging Backend (#1281)

* GitHub Actions for Staging Backend

* trigger run
Jorge Escobar 2022-02-16 16:40:25 -05:00, committed by GitHub
commit 82809a5123
7 changed files with 118 additions and 87 deletions

.github/workflows/closed_fe_pr.yml (new file, +20)

@@ -0,0 +1,20 @@
name: Closed Frontend PR
on:
  pull_request:
    types: [closed]
    paths:
      - "client/**/*"
env:
  PR_NUMBER: ${{ github.event.number }}
jobs:
  remove-artifacts:
    runs-on: ubuntu-latest
    steps:
      - name: Configure AWS Credentials
        uses: aws-actions/configure-aws-credentials@v1
        with:
          aws-access-key-id: ${{ secrets.CLIENT_DEV_AWS_ACCESS_KEY_ID }}
          aws-secret-access-key: ${{ secrets.CLIENT_DEV_AWS_SECRET_ACCESS_KEY }}
          aws-region: us-east-1
      - name: Remove staging artifacts
        run: aws s3 rm s3://usds-geoplatform-justice40-website/justice40-tool/ --exclude "*" --include "$(echo $PR_NUMBER)-*" --recursive
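
The --exclude "*" / --include pair narrows the recursive delete to objects whose keys start with the PR number. A quick way to sanity-check that filter before anything is actually deleted (a sketch, assuming a hypothetical PR #123) is the CLI's dry-run mode:

    # Sketch: preview which staging objects would be removed for PR #123.
    # --dryrun prints the matching delete operations without executing them.
    PR_NUMBER=123
    aws s3 rm s3://usds-geoplatform-justice40-website/justice40-tool/ \
      --exclude "*" --include "${PR_NUMBER}-*" --recursive --dryrun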

@@ -1,20 +0,0 @@
name: Closed PR
on:
  pull_request:
    types: [ closed ]
    paths:
      - "client/**/*"
env:
  PR_NUMBER: ${{ github.event.number }}
jobs:
  remove-artifacts:
    runs-on: ubuntu-latest
    steps:
      - name: Configure AWS Credentials
        uses: aws-actions/configure-aws-credentials@v1
        with:
          aws-access-key-id: ${{ secrets.CLIENT_DEV_AWS_ACCESS_KEY_ID }}
          aws-secret-access-key: ${{ secrets.CLIENT_DEV_AWS_SECRET_ACCESS_KEY }}
          aws-region: us-east-1
      - name: Remove staging artifacts
        run: aws s3 rm s3://usds-geoplatform-justice40-website/justice40-tool/ --exclude "*" --include "$(echo $PR_NUMBER)-*" --recursive

.github/workflows/deploy_be_staging.yml (new file, +89)

@@ -0,0 +1,89 @@
name: Deploy Backend staging
on:
  pull_request:
    branches: [main]
    paths:
      - "data/*"
env:
  PR_NUMBER: ${{github.event.pull_request.number}}
  SHA_NUMBER: ${{github.event.pull_request.head.sha}}
jobs:
  generate-score-tiles:
    runs-on: ubuntu-latest
    defaults:
      run:
        working-directory: data/data-pipeline
    strategy:
      matrix:
        python-version: [3.9]
    steps:
      - name: Checkout source
        uses: actions/checkout@v2
      - name: Print variables to help debug
        uses: hmarr/debug-action@v2
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v2
        with:
          python-version: ${{ matrix.python-version }}
      - name: Setup Poetry
        uses: Gr1N/setup-poetry@v7
      - name: Print poetry version
        run: poetry --version
      - name: Install dependencies
        run: poetry install
      - name: Configure AWS Credentials
        uses: aws-actions/configure-aws-credentials@v1
        with:
          aws-access-key-id: ${{ secrets.DATA_DEV_AWS_ACCESS_KEY_ID }}
          aws-secret-access-key: ${{ secrets.DATA_DEV_AWS_SECRET_ACCESS_KEY }}
          aws-region: us-east-1
      - name: Generate Score
        run: |
          poetry run python3 data_pipeline/application.py score-full-run
      - name: Generate Score Post
        run: |
          poetry run python3 data_pipeline/application.py generate-score-post -s aws
      - name: Generate Score Geo
        run: |
          poetry run python3 data_pipeline/application.py geo-score
      - name: Install GDAL/ogr2ogr
        run: |
          sudo add-apt-repository ppa:ubuntugis/ppa
          sudo apt-get -y install gdal-bin
          ogrinfo --version
      - name: Set timezone for tippecanoe
        uses: szenius/set-timezone@v1.0
        with:
          timezoneLinux: "America/Los_Angeles"
      - name: Get tippecanoe
        run: |
          sudo apt-get install -y software-properties-common libsqlite3-dev zlib1g-dev
          sudo apt-add-repository -y ppa:git-core/ppa
          sudo mkdir -p /tmp/tippecanoe-src
          sudo git clone https://github.com/mapbox/tippecanoe.git /tmp/tippecanoe-src
      - name: Make tippecanoe
        working-directory: /tmp/tippecanoe-src
        run: |
          sudo /usr/bin/bash -c make
          mkdir -p /usr/local/bin
          cp tippecanoe /usr/local/bin/tippecanoe
          tippecanoe -v
      - name: Generate Tiles
        run: |
          poetry run python3 data_pipeline/application.py generate-map-tiles
      - name: Deploy to Geoplatform AWS
        run: |
          aws s3 sync ./data_pipeline/data/score/csv/ s3://justice40-data/data-pipeline-staging/${{env.PR_NUMBER}}/${{env.SHA_NUMBER}}/data/score/csv --acl public-read --delete
          aws s3 sync ./data_pipeline/data/score/downloadable/ s3://justice40-data/data-pipeline-staging/${{env.PR_NUMBER}}/${{env.SHA_NUMBER}}/data/score/downloadable --acl public-read --delete
          aws s3 sync ./data_pipeline/data/score/geojson/ s3://justice40-data/data-pipeline-staging/${{env.PR_NUMBER}}/${{env.SHA_NUMBER}}/data/score/geojson --acl public-read --delete
          aws s3 cp ./data_pipeline/data/score/tiles/ s3://justice40-data/data-pipeline-staging/${{env.PR_NUMBER}}/${{env.SHA_NUMBER}}/data/score/tiles --recursive --acl public-read
      - name: Update PR with deployed URL
        uses: mshick/add-pr-comment@v1
        with:
          # Deploy to S3 for the staging URL
          message: |
            **🚢 PR Deployed! 🚢**
            Find it here: http://usds-geoplatform-justice40-website.s3-website-us-east-1.amazonaws.com/justice40-tool/${{env.PR_NUMBER}}/en/cejst/ !
          repo-token: ${{ secrets.GITHUB_TOKEN }}
          repo-token-user-login: "github-actions[bot]" # The user.login for temporary GitHub tokens
          allow-repeats: false # This is the default
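
Because every run publishes under a PR-number/commit-SHA prefix, multiple PRs can stage data side by side without clobbering each other. A minimal sketch for verifying one run's output, with placeholder PR number and SHA:

    # Sketch: list the artifacts staged for a given PR and commit (placeholder values).
    PR=1281
    SHA=0123abc
    aws s3 ls "s3://justice40-data/data-pipeline-staging/${PR}/${SHA}/data/score/" --recursive | head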

@@ -1,15 +1,12 @@
-# This workflow will do a clean install of node dependencies, build the source code and run tests across different versions of node
-# For more information see: https://help.github.com/actions/language-and-framework-guides/using-nodejs-with-github-actions
-
-name: Deploy prod
+name: Deploy Frontend Prod
 on:
   push:
     branches: [main]
     paths:
       - "client/**/*"
 env:
   PR_NUMBER: ${{ github.event.number }}
   WEB_CDN_ID: E27WCOZZ03KIBX
 jobs:
   build:
     runs-on: ubuntu-latest
@@ -33,7 +30,7 @@ jobs:
       - name: Build
         run: npm run build --if-present
         env:
           # See the client readme for more info on environment variables:
           # https://github.com/usds/justice40-tool/blob/main/client/README.md
           DATA_SOURCE: cdn
           # TODO: Update main URL when either is back up
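
WEB_CDN_ID identifies the CloudFront distribution in front of the prod site; the step that consumes it falls outside this hunk. For reference, such an invalidation step typically reduces to a single CLI call (illustrative only, not necessarily the exact step this workflow runs):

    # Illustrative sketch: flush the CDN cache after a deploy.
    aws cloudfront create-invalidation --distribution-id "$WEB_CDN_ID" --paths "/*"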

@@ -1,13 +1,10 @@
-# This workflow will do a clean install of node dependencies, build the source code and run tests across different versions of node
-# For more information see: https://help.github.com/actions/language-and-framework-guides/using-nodejs-with-github-actions
-
-name: Deploy stage
+name: Deploy Frontend staging
 on:
   pull_request:
     branches: [main]
     paths:
       - "client/**/*"
 env:
   PR_NUMBER: ${{github.event.pull_request.number}}
   WORKING_DIRECTORY: ./client
 jobs:
@@ -33,7 +30,7 @@ jobs:
       - name: Build
         run: npm run build --if-present
         env:
           # See the client readme for more info on environment variables:
           # https://github.com/usds/justice40-tool/blob/main/client/README.md
           DATA_SOURCE: cdn
           SITE_URL: "${{ secrets.STAGE_SITE_URL }}"
@@ -55,7 +52,7 @@ jobs:
         with:
           working-directory: ${{env.WORKING_DIRECTORY}}
           start: npm start
-          wait-on: 'http://localhost:8000'
+          wait-on: "http://localhost:8000"
       - name: Upload Artifact
         uses: actions/upload-artifact@v2
         with:
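
The wait-on quoting change is cosmetic; the step still blocks until the dev server answers on port 8000 before Cypress runs. Reproducing that gate locally is roughly (a sketch, assuming the client's standard npm scripts):

    # Sketch: approximate the workflow's end-to-end check on a local machine.
    cd client
    npm ci
    npm start &                          # dev server, expected on http://localhost:8000
    npx wait-on http://localhost:8000    # same readiness gate the workflow uses
    npx cypress run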

@@ -1,52 +0,0 @@
name: Generate Score
on:
  workflow_dispatch:
    inputs:
      confirm-action:
        description: This will rebuild the data sources and regenerate the score, are you sure you want to proceed? (Y/n)
        default: n
        required: true
jobs:
  deploy_data:
    runs-on: ubuntu-latest
    defaults:
      run:
        working-directory: data/data-pipeline
    strategy:
      matrix:
        python-version: [3.9]
    steps:
      - name: Checkout source
        uses: actions/checkout@v2
      - name: Print variables to help debug
        uses: hmarr/debug-action@v2
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v2
        with:
          python-version: ${{ matrix.python-version }}
      - name: Setup Poetry
        uses: Gr1N/setup-poetry@v7
      - name: Print poetry version
        run: poetry --version
      - name: Install dependencies
        run: poetry install
      - name: Configure AWS Credentials
        uses: aws-actions/configure-aws-credentials@v1
        with:
          aws-access-key-id: ${{ secrets.DATA_DEV_AWS_ACCESS_KEY_ID }}
          aws-secret-access-key: ${{ secrets.DATA_DEV_AWS_SECRET_ACCESS_KEY }}
          aws-region: us-east-1
      - name: Generate Score
        run: |
          poetry run python3 data_pipeline/application.py score-full-run
      - name: Upload Score to AWS
        run: |
          aws s3 sync ./data_pipeline/data/score/csv/ s3://justice40-data/data-pipeline/data/score/csv --acl public-read --delete
      - name: Generate Score Post
        run: |
          poetry run python3 data_pipeline/application.py generate-score-post -s aws
      - name: Upload Score Post to AWS
        run: |
          aws s3 sync ./data_pipeline/data/score/csv/ s3://justice40-data/data-pipeline/data/score/csv --acl public-read --delete
          aws s3 sync ./data_pipeline/data/score/downloadable/ s3://justice40-data/data-pipeline/data/score/downloadable --acl public-read --delete
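
This removed workflow was triggered by hand through workflow_dispatch, gated on the confirm-action input. For reference, dispatching a workflow like it from the GitHub CLI looks like this (the workflow file name here is assumed for illustration):

    # Sketch: manual dispatch via the GitHub CLI; the file name is hypothetical.
    gh workflow run generate_score.yml -f confirm-action=Y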

@@ -104,7 +104,7 @@ class ExtractTransformLoad:
         # of the instance which is often a child class.
         tmp_path = self.DATA_PATH / "tmp" / str(self.__class__.__name__)

-        # Create directory if it doesn't exist
+        # Create directory if it doesn't exist already
         tmp_path.mkdir(parents=True, exist_ok=True)

         return tmp_path