Merge pull request #36 from DOI-DO/nmb/workflow-cleanup

Clean up unused GitHub workflows for safety
Neil Martinsen-Burrell, 2024-12-12 09:19:46 -06:00, committed by GitHub
commit c05d101cbc
12 changed files with 0 additions and 864 deletions


@@ -1,20 +0,0 @@
name: Closed Backend PR
on:
  pull_request:
    types: [closed]
    paths:
      - "data/**"
env:
  PR_NUMBER: ${{github.event.pull_request.number}}
jobs:
  remove-artifacts:
    runs-on: ubuntu-latest
    steps:
      - name: Configure AWS Credentials
        uses: aws-actions/configure-aws-credentials@v1
        with:
          aws-access-key-id: ${{ secrets.DATA_DEV_AWS_ACCESS_KEY_ID }}
          aws-secret-access-key: ${{ secrets.DATA_DEV_AWS_SECRET_ACCESS_KEY }}
          aws-region: us-east-1
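      # The backend staging deploy (further down in this commit) uploads score artifacts
      # under data-pipeline-staging/<PR number>/<commit SHA>/, so deleting the whole PR
      # prefix recursively cleans up every commit's uploads in one pass.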
      - name: Remove staging artifacts
        run: aws s3 rm s3://justice40-data/data-pipeline-staging/${{env.PR_NUMBER}} --recursive


@@ -1,20 +0,0 @@
name: Closed Frontend PR
on:
  pull_request:
    types: [closed]
    paths:
      - "client/**/*"
env:
  PR_NUMBER: ${{ github.event.number }}
jobs:
  remove-artifacts:
    runs-on: ubuntu-latest
    steps:
      - name: Configure AWS Credentials
        uses: aws-actions/configure-aws-credentials@v1
        with:
          aws-access-key-id: ${{ secrets.CLIENT_DEV_AWS_ACCESS_KEY_ID }}
          aws-secret-access-key: ${{ secrets.CLIENT_DEV_AWS_SECRET_ACCESS_KEY }}
          aws-region: us-east-1
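      # Frontend staging builds land in folders named <PR number>-<short SHA> (see the
      # frontend staging deploy below), so the exclude-everything/include-"$PR_NUMBER-*"
      # pair limits the recursive delete to this PR's folders only.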
      - name: Remove staging artifacts
        run: aws s3 rm s3://usds-geoplatform-justice40-website/justice40-tool/ --exclude "*" --include "$(echo $PR_NUMBER)-*" --recursive


.github/workflows/combine-tilefy.yml
@@ -1,87 +0,0 @@
name: Combine and Tilefy
on:
  workflow_dispatch:
    inputs:
      confirm-action:
        description: This will rebuild the data sources and regenerate the score, are you sure you want to proceed? (Y/n)
        default: n
        required: true
env:
  BE_CDN_ID: E1324VDMNCO97N
jobs:
  deploy_data:
    runs-on: ubuntu-latest
    defaults:
      run:
        working-directory: data/data-pipeline
    strategy:
      matrix:
        python-version: [3.9]
    steps:
      - name: Checkout source
        uses: actions/checkout@v2
      - name: Print variables to help debug
        uses: hmarr/debug-action@v2
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v2
        with:
          python-version: ${{ matrix.python-version }}
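      # Cache Poetry's virtualenvs, keyed on the lockfile and on this workflow file
      # itself, so a change to either one rebuilds the environment from scratch.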
      - name: Load cached Poetry installation
        uses: actions/cache@v2
        id: cached-poetry-dependencies
        with:
          path: ~/.cache/pypoetry/virtualenvs
          key: env-${{ runner.os }}-${{ matrix.python-version }}-${{ hashFiles('**/poetry.lock') }}-${{ hashFiles('.github/workflows/combine-tilefy.yml') }}
      - name: Install poetry
        uses: snok/install-poetry@v1
      - name: Print Poetry settings
        run: poetry show -v
      - name: Install dependencies
        run: poetry add s4cmd && poetry install
        if: steps.cached-poetry-dependencies.outputs.cache-hit != 'true'
      - name: Install GDAL/ogr2ogr
        run: |
          sudo add-apt-repository ppa:ubuntugis/ppa
          sudo apt-get update
          sudo apt-get -y install gdal-bin
          ogrinfo --version
      - name: Set timezone for tippecanoe
        uses: szenius/set-timezone@v1.0
        with:
          timezoneLinux: "America/Los_Angeles"
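      # tippecanoe is cloned and built from source in the next two steps, then copied
      # onto the PATH; the trailing `tippecanoe -v` is a quick smoke test of the binary.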
      - name: Get tippecanoe
        run: |
          sudo apt-get install -y software-properties-common libsqlite3-dev zlib1g-dev
          sudo apt-add-repository -y ppa:git-core/ppa
          sudo mkdir -p /tmp/tippecanoe-src
          sudo git clone https://github.com/mapbox/tippecanoe.git /tmp/tippecanoe-src
      - name: Make tippecanoe
        working-directory: /tmp/tippecanoe-src
        run: |
          sudo /usr/bin/bash -c make
          mkdir -p /usr/local/bin
          cp tippecanoe /usr/local/bin/tippecanoe
          tippecanoe -v
      - name: Run Scripts
        run: |
          poetry run python3 data_pipeline/application.py geo-score -s aws
          poetry run python3 data_pipeline/application.py generate-map-tiles
      - name: Configure AWS Credentials
        uses: aws-actions/configure-aws-credentials@v1
        with:
          aws-access-key-id: ${{ secrets.DATA_DEV_AWS_ACCESS_KEY_ID }}
          aws-secret-access-key: ${{ secrets.DATA_DEV_AWS_SECRET_ACCESS_KEY }}
          aws-region: us-east-1
      - name: Deploy to Geoplatform AWS
        run: |
          poetry run s4cmd put ./data_pipeline/data/score/geojson/ s3://justice40-data/data-pipeline/data/score/geojson --recursive --force --API-ACL=public-read --num-threads=250
          poetry run s4cmd put ./data_pipeline/data/score/shapefile/ s3://justice40-data/data-pipeline/data/score/shapefile --recursive --force --API-ACL=public-read
          poetry run s4cmd put ./data_pipeline/data/score/tiles/ s3://justice40-data/data-pipeline/data/score/tiles --recursive --force --API-ACL=public-read --num-threads=250
      - name: Invalidate cache on AWS CDNs
        uses: chetan/invalidate-cloudfront-action@master
        env:
          DISTRIBUTION: ${{env.BE_CDN_ID}}
          PATHS: "/*"
          AWS_REGION: "us-east-1"
          AWS_ACCESS_KEY_ID: ${{ secrets.DATA_DEV_AWS_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.DATA_DEV_AWS_SECRET_ACCESS_KEY }}


.github/workflows/create-score-version.yml
@@ -1,125 +0,0 @@
name: Create Score Version
on:
  workflow_dispatch:
    inputs:
      score_version:
        description: "Which version of the score are you generating?"
        required: true
        default: '1.0'
        type: choice
        options:
          - beta
          - 1.0
          - test
env:
  CENSUS_API_KEY: ${{ secrets.CENSUS_API_KEY }}
  J40_VERSION_LABEL_STRING: ${{ inputs.score_version }}
jobs:
  generate-score-tiles:
    runs-on: ubuntu-latest
    defaults:
      run:
        working-directory: data/data-pipeline
    strategy:
      matrix:
        python-version: [3.9]
    steps:
      - name: Checkout source
        uses: actions/checkout@v2
      - name: Print variables to help debug
        uses: hmarr/debug-action@v2
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v2
        with:
          python-version: ${{ matrix.python-version }}
      - name: Load cached Poetry installation
        id: cached-poetry-dependencies
        uses: actions/cache@v2
        with:
          path: ~/.cache/pypoetry/virtualenvs
          key: env-${{ runner.os }}-${{ matrix.python-version }}-${{ hashFiles('**/poetry.lock') }}-${{ hashFiles('.github/workflows/create-score-version.yml') }}
      - name: Install poetry
        uses: snok/install-poetry@v1
      - name: Print Poetry settings
        run: poetry show -v
      - name: Install dependencies
        run: poetry add s4cmd && poetry install
        if: steps.cached-poetry-dependencies.outputs.cache-hit != 'true'
      - name: Configure AWS Credentials
        uses: aws-actions/configure-aws-credentials@v1
        with:
          aws-access-key-id: ${{ secrets.DATA_DEV_AWS_ACCESS_KEY_ID }}
          aws-secret-access-key: ${{ secrets.DATA_DEV_AWS_SECRET_ACCESS_KEY }}
          aws-region: us-east-1
      - name: Install GDAL/ogr2ogr
        run: |
          sudo apt-get update
          sudo apt-get -y install gdal-bin
          ogrinfo --version
      - name: Generate Score
        run: |
          poetry run python3 data_pipeline/application.py score-full-run
      - name: Generate Score Post
        run: |
          poetry run python3 data_pipeline/application.py generate-score-post -s aws
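      # Version sanity checks: the 1.0/test score output mentions the tribal-overlap
      # factor, while the beta output should not. A grep that finds no matching line
      # exits non-zero and fails the job before anything is deployed.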
      - name: Confirm we generated the version of the score we think we did
        if: ${{ env.J40_VERSION_LABEL_STRING == '1.0' || env.J40_VERSION_LABEL_STRING == 'test' }}
        run: |
          grep "Identified as disadvantaged due to tribal overlap" data_pipeline/data/score/downloadable/* > /dev/null
      - name: Confirm we generated the version of the score we think we did
        if: ${{ env.J40_VERSION_LABEL_STRING == 'beta' }}
        run: |
          grep -v "Identified as disadvantaged due to tribal overlap" data_pipeline/data/score/downloadable/* > /dev/null
      - name: Generate Score Geo
        run: |
          poetry run python3 data_pipeline/application.py geo-score
      - name: Run smoketest for 1.0
        if: ${{ env.J40_VERSION_LABEL_STRING == '1.0' || env.J40_VERSION_LABEL_STRING == 'test' }}
        run: |
          poetry run pytest data_pipeline/ -m smoketest
      - name: Deploy Score to Geoplatform AWS
        run: |
          poetry run s4cmd put ./data_pipeline/data/score/csv/ s3://justice40-data/data-versions/${{env.J40_VERSION_LABEL_STRING}}/data/score/csv --recursive --force --API-ACL=public-read
          poetry run s4cmd put ./data_pipeline/files/ s3://justice40-data/data-versions/${{env.J40_VERSION_LABEL_STRING}}/data/score/downloadable --recursive --force --API-ACL=public-read
          poetry run s4cmd put ./data_pipeline/data/score/downloadable/ s3://justice40-data/data-versions/${{env.J40_VERSION_LABEL_STRING}}/data/score/downloadable --recursive --force --API-ACL=public-read
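      # curl -s -f -I sends a HEAD request and exits non-zero on any HTTP error, so the
      # chained && calls below fail the job if any published download is unreachable.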
      - name: Confirm we generated the version of the score we think we did
        if: ${{ env.J40_VERSION_LABEL_STRING == '1.0' || env.J40_VERSION_LABEL_STRING == 'test' }}
        run: |
          curl "https://static-data-screeningtool.geoplatform.gov/data-versions/1.0/data/score/downloadable/1.0-shapefile-codebook.zip" -s -f -I -o /dev/null && \
          curl "https://static-data-screeningtool.geoplatform.gov/data-versions/1.0/data/score/downloadable/1.0-communities.xlsx" -s -f -I -o /dev/null && \
          curl "https://static-data-screeningtool.geoplatform.gov/data-versions/1.0/data/score/downloadable/1.0-communities.csv" -s -f -I -o /dev/null && \
          curl "https://static-data-screeningtool.geoplatform.gov/data-versions/1.0/data/score/downloadable/1.0-shapefile-codebook.zip" -s -f -I -o /dev/null && \
          curl "https://static-data-screeningtool.geoplatform.gov/data-versions/1.0/data/score/downloadable/cejst-technical-support-document.pdf" -s -f -I -o /dev/null && \
          curl "https://static-data-screeningtool.geoplatform.gov/data-versions/1.0/data/score/downloadable/draft-communities-list.pdf" -s -f -I -o /dev/null
      - name: Confirm we generated the version of the score we think we did
        if: ${{ env.J40_VERSION_LABEL_STRING == 'beta' }}
        run: |
          curl "https://static-data-screeningtool.geoplatform.gov/data-versions/beta/data/score/downloadable/beta-data-documentation.zip" -s -f -I -o /dev/null && \
          curl "https://static-data-screeningtool.geoplatform.gov/data-versions/beta/data/score/downloadable/beta-shapefile-codebook.zip" -s -f -I -o /dev/null
      - name: Set timezone for tippecanoe
        uses: szenius/set-timezone@v1.0
        with:
          timezoneLinux: "America/Los_Angeles"
      - name: Get tippecanoe
        run: |
          sudo apt-get install -y software-properties-common libsqlite3-dev zlib1g-dev
          sudo apt-add-repository -y ppa:git-core/ppa
          sudo mkdir -p /tmp/tippecanoe-src
          sudo git clone https://github.com/mapbox/tippecanoe.git /tmp/tippecanoe-src
      - name: Make tippecanoe
        working-directory: /tmp/tippecanoe-src
        run: |
          sudo /usr/bin/bash -c make
          mkdir -p /usr/local/bin
          cp tippecanoe /usr/local/bin/tippecanoe
          tippecanoe -v
      - name: Generate Tiles
        run: |
          poetry run python3 data_pipeline/application.py generate-map-tiles
      - name: Deploy Map to Geoplatform AWS
        run: |
          poetry run s4cmd put ./data_pipeline/data/score/geojson/ s3://justice40-data/data-versions/${{env.J40_VERSION_LABEL_STRING}}/data/score/geojson --recursive --force --API-ACL=public-read --num-threads=250
          poetry run s4cmd put ./data_pipeline/data/score/shapefile/ s3://justice40-data/data-versions/${{env.J40_VERSION_LABEL_STRING}}/${{env.SHA_NUMBER}}/data/score/shapefile --recursive --force --API-ACL=public-read
          poetry run s4cmd put ./data_pipeline/data/score/tiles/ s3://justice40-data/data-versions/${{env.J40_VERSION_LABEL_STRING}}/data/score/tiles --recursive --force --API-ACL=public-read --num-threads=250


.github/workflows/deploy_be_staging.yml
@@ -1,132 +0,0 @@
name: Deploy Backend Staging
on:
  pull_request:
    branches:
      - main
      - "**/release/**"
    paths:
      - "data/**"
env:
  PR_NUMBER: ${{github.event.pull_request.number}}
  SHA_NUMBER: ${{github.event.pull_request.head.sha}}
  CENSUS_API_KEY: ${{ secrets.CENSUS_API_KEY }}
jobs:
  generate-score-tiles:
    runs-on: ubuntu-latest
    defaults:
      run:
        working-directory: data/data-pipeline
    strategy:
      matrix:
        python-version: [3.9]
    steps:
      - name: Checkout source
        uses: actions/checkout@v2
      - name: Print variables to help debug
        uses: hmarr/debug-action@v2
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v4
        with:
          python-version: ${{ matrix.python-version }}
      - run: pip install -U wheel
      - name: Load cached Poetry installation
        id: cached-poetry-dependencies
        uses: actions/cache@v2
        with:
          path: ~/.cache/pypoetry/virtualenvs
          key: env-${{ runner.os }}-${{ matrix.python-version }}-${{ hashFiles('**/poetry.lock') }}-${{ hashFiles('.github/workflows/deploy_be_staging.yml') }}
      - name: Install poetry
        uses: snok/install-poetry@v1.3.3
      - name: Print Poetry settings
        run: poetry show -v
      - name: Install GDAL/ogr2ogr
        run: |
          sudo apt-get update
          sudo apt-get -y install gdal-bin
          ogrinfo --version
      - name: Install dependencies
        run: poetry add s4cmd && poetry install
        if: steps.cached-poetry-dependencies.outputs.cache-hit != 'true'
      - name: Configure AWS Credentials
        uses: aws-actions/configure-aws-credentials@v1
        with:
          aws-access-key-id: ${{ secrets.DATA_DEV_AWS_ACCESS_KEY_ID }}
          aws-secret-access-key: ${{ secrets.DATA_DEV_AWS_SECRET_ACCESS_KEY }}
          aws-region: us-east-1
      - name: Download census geo data for later use
        run: |
          poetry run python3 data_pipeline/application.py pull-census-data -s aws
      - name: Generate Score
        run: |
          poetry run python3 data_pipeline/application.py score-full-run
      - name: Generate Score Post
        run: |
          poetry run python3 data_pipeline/application.py generate-score-post
      - name: Generate Score Geo
        run: |
          poetry run python3 data_pipeline/application.py geo-score
      - name: Run Smoketests
        run: |
          poetry run pytest data_pipeline/ -m smoketest
      - name: Deploy Score to Geoplatform AWS
        run: |
          poetry run s4cmd put ./data_pipeline/data/score/csv/ s3://justice40-data/data-pipeline-staging/${{env.PR_NUMBER}}/${{env.SHA_NUMBER}}/data/score/csv --recursive --force --API-ACL=public-read
          poetry run s4cmd put ./data_pipeline/files/ s3://justice40-data/data-pipeline-staging/${{env.PR_NUMBER}}/${{env.SHA_NUMBER}}/data/score/downloadable --recursive --force --API-ACL=public-read
      - name: Update PR with deployed Score URLs
        uses: mshick/add-pr-comment@v1
        with:
          # Deploy to S3 for the Staging URL
          message: |
            ** Score Deployed! **
            Find it here:
            - Score Full usa.csv: https://justice40-data.s3.amazonaws.com/data-pipeline-staging/${{env.PR_NUMBER}}/${{env.SHA_NUMBER}}/data/score/csv/full/usa.csv
            - Download Zip Packet: https://justice40-data.s3.amazonaws.com/data-pipeline-staging/${{env.PR_NUMBER}}/${{env.SHA_NUMBER}}/data/score/downloadable/Screening_Tool_Data.zip
          repo-token: ${{ secrets.GITHUB_TOKEN }}
          repo-token-user-login: "github-actions[bot]"
          allow-repeats: false
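      # comparator.py writes a markdown summary of the score comparison for this run;
      # the step after it posts that summary file to the PR as a comment.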
      - name: Perform Score Comparisons
        run: |
          poetry run python3 data_pipeline/comparator.py compare-score
      - name: Update PR with Score Comparisons
        uses: mshick/add-pr-comment@v2
        with:
          message-path: ./data/data-pipeline/data_pipeline/data/tmp/Comparator/Score/comparison-summary.md
          repo-token: ${{ secrets.GITHUB_TOKEN }}
          allow-repeats: false
      - name: Set timezone for tippecanoe
        uses: szenius/set-timezone@v1.0
        with:
          timezoneLinux: "America/Los_Angeles"
      - name: Get tippecanoe
        run: |
          sudo apt-get install -y software-properties-common libsqlite3-dev zlib1g-dev
          sudo apt-add-repository -y ppa:git-core/ppa
          sudo mkdir -p /tmp/tippecanoe-src
          sudo git clone https://github.com/mapbox/tippecanoe.git /tmp/tippecanoe-src
      - name: Make tippecanoe
        working-directory: /tmp/tippecanoe-src
        run: |
          sudo /usr/bin/bash -c make
          mkdir -p /usr/local/bin
          cp tippecanoe /usr/local/bin/tippecanoe
          tippecanoe -v
      - name: Generate Tiles
        run: |
          poetry run python3 data_pipeline/application.py generate-map-tiles
      - name: Deploy Map to Geoplatform AWS
        run: |
          poetry run s4cmd put ./data_pipeline/data/score/geojson/ s3://justice40-data/data-pipeline-staging/${{env.PR_NUMBER}}/${{env.SHA_NUMBER}}/data/score/geojson --recursive --force --API-ACL=public-read --num-threads=250
          poetry run s4cmd put ./data_pipeline/data/score/shapefile/ s3://justice40-data/data-pipeline-staging/${{env.PR_NUMBER}}/${{env.SHA_NUMBER}}/data/score/shapefile --recursive --force --API-ACL=public-read
          poetry run s4cmd put ./data_pipeline/data/score/tiles/ s3://justice40-data/data-pipeline-staging/${{env.PR_NUMBER}}/${{env.SHA_NUMBER}}/data/score/tiles --recursive --force --API-ACL=public-read --num-threads=250
          poetry run s4cmd put ./data_pipeline/data/score/downloadable/ s3://justice40-data/data-pipeline-staging/${{env.PR_NUMBER}}/${{env.SHA_NUMBER}}/data/score/downloadable --recursive --force --API-ACL=public-read
      - name: Update PR with deployed Map URL
        uses: mshick/add-pr-comment@v1
        with:
          # Deploy to S3 for the staging URL
          message: |
            ** Map Deployed! **
            Map with Staging Backend: https://screeningtool.geoplatform.gov/en?flags=stage_hash=${{env.PR_NUMBER}}/${{env.SHA_NUMBER}}
            Find tiles here: https://justice40-data.s3.amazonaws.com/data-pipeline-staging/${{env.PR_NUMBER}}/${{env.SHA_NUMBER}}/data/score/tiles
          repo-token: ${{ secrets.GITHUB_TOKEN }}
          repo-token-user-login: "github-actions[bot]"
          allow-repeats: false


@@ -1,101 +0,0 @@
name: Deploy Frontend Prod
on:
  push:
    branches: [main]
    paths:
      - "client/**/*"
env:
  PR_NUMBER: ${{ github.event.number }}
  WEB_CDN_ID: ED03LPVC4OXSW
jobs:
  build:
    runs-on: ubuntu-latest
    defaults:
      run:
        working-directory: client
    strategy:
      matrix:
        node-version: [14.x]
    steps:
      - uses: actions/checkout@v2
      - name: Use Node.js ${{ matrix.node-version }}
        uses: actions/setup-node@v2
        with:
          node-version: ${{ matrix.node-version }}
      - name: Set DESTINATION_FOLDER for main
        run: |
          echo "DESTINATION_FOLDER=main" >> $GITHUB_ENV
      - name: Install
        run: npm ci
      - name: Build
        run: npm run build --if-present
        env:
          # See the client readme for more info on environment variables:
          # https://github.com/usds/justice40-tool/blob/main/client/README.md
          DATA_SOURCE: cdn
          # TODO: Update main URL when either is back up
          SITE_URL: "${{ secrets.PROD_SITE_URL }}"
          MAPBOX_STYLES_READ_TOKEN: "${{ secrets.MAPBOX_STYLES_READ_TOKEN }}"
      - name: Get directory contents
        run: ls -la public
      - name: Lint
        run: npm run lint
      # Disabling for now due to jsonlint - TODO: put this back
      # - name: License Check
      #   run: npm run licenses
      - name: Test
        run: npm test
      # - name: Check for security vulnerabilities
      #   run: npm audit --production
      - name: Upload Artifact
        uses: actions/upload-artifact@v2
        with:
          name: J40Static
          # Upload-artifact does not support the default working directory
          # See more: https://github.com/actions/upload-artifact/issues/87
          path: ./client/public
  deploy:
    runs-on: ubuntu-latest
    needs: build
    steps:
      - name: Checkout source
        uses: actions/checkout@v2
      - name: Download Artifacts
        uses: actions/download-artifact@v2
        with:
          name: J40Static
          path: ./public
      - name: Set DESTINATION_FOLDER for main
        run: |
          echo "DESTINATION_FOLDER=main" >> $GITHUB_ENV
      - name: Configure AWS Credentials
        uses: aws-actions/configure-aws-credentials@v1
        with:
          aws-access-key-id: ${{ secrets.CLIENT_DEV_AWS_ACCESS_KEY_ID }}
          aws-secret-access-key: ${{ secrets.CLIENT_DEV_AWS_SECRET_ACCESS_KEY }}
          aws-region: us-east-1
      - name: Deploy to Geoplatform AWS
        run: aws s3 sync ./public/ s3://usds-geoplatform-justice40-website/justice40-tool/${{env.DESTINATION_FOLDER}} --acl public-read --delete
      - name: Invalidate cache on AWS CDNs
        uses: chetan/invalidate-cloudfront-action@master
        env:
          DISTRIBUTION: ${{env.WEB_CDN_ID}}
          PATHS: "/*"
          AWS_REGION: "us-east-1"
          AWS_ACCESS_KEY_ID: ${{ secrets.CLIENT_DEV_AWS_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.CLIENT_DEV_AWS_SECRET_ACCESS_KEY }}
      - name: Update PR with deployed URL
        uses: mshick/add-pr-comment@v1
        with:
          message: |
            **🚢 PR Deployed! 🚢**
            Find it here: https://screeningtool.geoplatform.gov !
          repo-token: ${{ secrets.GITHUB_TOKEN }}
          repo-token-user-login: "github-actions[bot]" # The user.login for temporary GitHub tokens
          allow-repeats: false # This is the default
      - name: Printing deployment URLs
        run: |
          echo "Geoplatform official site (may or may not be up): https://screeningtool.geoplatform.gov/"
          echo "The Cloudfront behind this official site: https://d3r1k4gynx5dw4.cloudfront.net"
          echo "Github pages: https://usds.github.io/justice40-tool/$DESTINATION_FOLDER/en"
          echo "The origin S3 bucket (http only) : http://usds-geoplatform-justice40-website.s3-website-us-east-1.amazonaws.com/justice40-tool/$DESTINATION_FOLDER/en"


@@ -1,116 +0,0 @@
name: Deploy Frontend Staging
on:
  pull_request:
    branches:
      - main
      - '**/release/**'
    paths:
      - "client/**/*"
env:
  PR_NUMBER: ${{github.event.pull_request.number}}
  WORKING_DIRECTORY: ./client
jobs:
  build:
    runs-on: ubuntu-latest
    defaults:
      run:
        working-directory: client
    strategy:
      matrix:
        node-version: [14.x]
    steps:
      - uses: actions/checkout@v2
      - name: Use Node.js ${{ matrix.node-version }}
        uses: actions/setup-node@v2
        with:
          node-version: ${{ matrix.node-version }}
      - name: Set DESTINATION_FOLDER for branch
        run: |
          echo "DESTINATION_FOLDER=$(echo $PR_NUMBER-$GITHUB_SHA | cut -c 1-11)" >> $GITHUB_ENV
      - name: Install
        run: npm ci
      - name: Build
        run: npm run build --if-present
        env:
          # See the client readme for more info on environment variables:
          # https://github.com/usds/justice40-tool/blob/main/client/README.md
          DATA_SOURCE: cdn
          SITE_URL: "${{ secrets.STAGE_SITE_URL }}"
          PATH_PREFIX: "/${{env.DESTINATION_FOLDER}}"
          MAPBOX_STYLES_READ_TOKEN: "${{ secrets.MAPBOX_STYLES_READ_TOKEN }}"
      - name: Get directory contents
        run: ls -la public
      - name: Lint
        run: npm run lint
      # Disabling for now due to jsonlint - TODO: put this back
      # - name: License Check
      #   run: npm run licenses
      - name: Test
        run: npm test
      # - name: Check for security vulnerabilities
      #   run: npm audit --production
      # - name: Cypress / Gherkin integration tests 🌃
      #   uses: cypress-io/github-action@v4
      #   with:
      #     working-directory: ${{env.WORKING_DIRECTORY}}
      #     browser: chrome
      #     start: npm start
      #     wait-on: "http://localhost:8000/en"
      - name: Upload Artifact
        uses: actions/upload-artifact@v2
        with:
          name: J40Static
          # Upload-artifact does not support the default working directory
          # See more: https://github.com/actions/upload-artifact/issues/87
          path: ./client/public
  deploy:
    runs-on: ubuntu-latest
    needs: build
    steps:
      - name: Checkout source
        uses: actions/checkout@v2
      - name: Download Artifacts
        uses: actions/download-artifact@v2
        with:
          name: J40Static
          path: ./public
      - name: Set DESTINATION_FOLDER for branch
        run: |
          echo "DESTINATION_FOLDER=$(echo $PR_NUMBER-$GITHUB_SHA | cut -c 1-11)" >> $GITHUB_ENV
      - name: Set COMMIT_HASH for branch
        run: |
          echo "COMMIT_HASH=$(echo $GITHUB_SHA)" >> $GITHUB_ENV
      - name: Configure AWS Credentials
        uses: aws-actions/configure-aws-credentials@v1
        with:
          aws-access-key-id: ${{ secrets.CLIENT_DEV_AWS_ACCESS_KEY_ID }}
          aws-secret-access-key: ${{ secrets.CLIENT_DEV_AWS_SECRET_ACCESS_KEY }}
          aws-region: us-east-1
      - name: Deploy to Geoplatform AWS
        run: aws s3 sync ./public/ s3://usds-geoplatform-justice40-website/justice40-tool/${{env.DESTINATION_FOLDER}} --acl public-read --delete
      - name: Update PR with deployed URL
        uses: mshick/add-pr-comment@v1
        with:
          # Deploy to S3 for the staging URL
          message: |
            **🚢 Here is the frontend staging link: 🚢**
            Find it here: https://screeningtool-staging.geoplatform.gov/${{env.DESTINATION_FOLDER}}/en/ !
          repo-token: ${{ secrets.GITHUB_TOKEN }}
          repo-token-user-login: "github-actions[bot]" # The user.login for temporary GitHub tokens
          allow-repeats: false # This is the default
      - name: Printing deployment URLs
        run: |
          echo "Github pages: https://usds.github.io/justice40-tool/$DESTINATION_FOLDER/en"
      - name: Get changed files using defaults
        id: changed-files
        uses: tj-actions/changed-files@v18.1
      - name: Update PR with message that en.json has been updated
        uses: mshick/add-pr-comment@v1
        with:
          message: |
            ** 👋 Attention translators!! 👋 **
            Copy changes have resulted in a new en.json file. Please download en.json file and send to translators: https://github.com/usds/justice40-tool/blob/${{env.COMMIT_HASH}}/client/src/intl/en.json
          repo-token: ${{ secrets.GITHUB_TOKEN }}
          repo-token-user-login: "github-actions[bot]" # The user.login for temporary GitHub tokens
          allow-repeats: true
        if: contains(steps.changed-files.outputs.modified_files, 'client/src/intl/en.json')


@@ -1,28 +0,0 @@
# Full command list of cypress github actions:
# https://github.com/cypress-io/github-action#cypress-iogithub-action--
name: End to End Tests
on:
  # To run tests on the push event of a specific branch:
  # push:
  #   branches:
  #     - vimusds/cypress-debug-download
  schedule:
    # runs tests every day at 12am ET (4am UTC):
    - cron: '0 4 * * *'
jobs:
  nightly:
    runs-on: ubuntu-20.04
    env:
      working-directory: ./client
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      - name: Cypress nightly tests 🌃
        uses: cypress-io/github-action@v6
        with:
          working-directory: ${{env.working-directory}}
          start: npm start
          wait-on: 'http://localhost:8000'
          # To run only specific spec/tests:
          # spec: cypress/e2e/downloadPacket.spec.js


@@ -1,58 +0,0 @@
name: Generate Census
on:
  workflow_dispatch:
    inputs:
      confirm-action:
        description: This will rebuild the census data and upload it to S3, are you sure you want to proceed? (Y/n)
        default: n
        required: true
jobs:
  deploy_data:
    runs-on: ubuntu-latest
    defaults:
      run:
        working-directory: data/data-pipeline
    strategy:
      matrix:
        python-version: [3.9]
    steps:
      - name: Checkout source
        uses: actions/checkout@v2
      - name: Print variables to help debug
        uses: hmarr/debug-action@v2
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v2
        with:
          python-version: ${{ matrix.python-version }}
      - name: Setup Poetry
        uses: Gr1N/setup-poetry@v7
      - name: Print poetry version
        run: poetry --version
      - name: Install dependencies
        run: poetry install
      - name: Install GDAL/ogr2ogr
        run: |
          sudo add-apt-repository ppa:ubuntugis/ppa
          sudo apt-get -y install gdal-bin
          ogrinfo --version
      - name: Run Census Script
        run: |
          poetry run python3 data_pipeline/application.py census-data-download -zc
      - name: Configure AWS Credentials
        uses: aws-actions/configure-aws-credentials@v1
        with:
          aws-access-key-id: ${{ secrets.DATA_DEV_AWS_ACCESS_KEY_ID }}
          aws-secret-access-key: ${{ secrets.DATA_DEV_AWS_SECRET_ACCESS_KEY }}
          aws-region: us-east-1
      - name: Upload Census Zip to Geoplatform AWS
        run: |
          aws s3 cp ./data_pipeline/data/tmp/census.zip s3://justice40-data/data-sources/census.zip --acl public-read
      - name: Update PR with Comment about deployment
        uses: mshick/add-pr-comment@v1
        with:
          message: |
            Data Synced! Find it here: s3://justice40-data/data-pipeline/data/
          repo-token: ${{ secrets.GITHUB_TOKEN }}
          repo-token-user-login: 'github-actions[bot]' # The user.login for temporary GitHub tokens
          allow-repeats: false # This is the default


.github/workflows/generate-score.yml
@@ -1,60 +0,0 @@
name: Generate Score
on:
  workflow_dispatch:
    inputs:
      confirm-action:
        description: This will rebuild the data sources and regenerate the score, are you sure you want to proceed? (Y/n)
        default: n
        required: true
jobs:
  deploy_data:
    runs-on: ubuntu-latest
    defaults:
      run:
        working-directory: data/data-pipeline
    strategy:
      matrix:
        python-version: [3.9]
    steps:
      - name: Checkout source
        uses: actions/checkout@v2
      - name: Print variables to help debug
        uses: hmarr/debug-action@v2
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v2
        with:
          python-version: ${{ matrix.python-version }}
      - name: Load cached Poetry installation
        id: cached-poetry-dependencies
        uses: actions/cache@v2
        with:
          path: ~/.cache/pypoetry/virtualenvs
          key: env-${{ runner.os }}-${{ matrix.python-version }}-${{ hashFiles('**/poetry.lock') }}-${{ hashFiles('.github/workflows/generate-score.yml') }}
      - name: Install poetry
        uses: snok/install-poetry@v1
      - name: Print Poetry settings
        run: poetry show -v
      - name: Install dependencies
        run: poetry install
        if: steps.cached-poetry-dependencies.outputs.cache-hit != 'true'
      - name: Configure AWS Credentials
        uses: aws-actions/configure-aws-credentials@v1
        with:
          aws-access-key-id: ${{ secrets.DATA_DEV_AWS_ACCESS_KEY_ID }}
          aws-secret-access-key: ${{ secrets.DATA_DEV_AWS_SECRET_ACCESS_KEY }}
          aws-region: us-east-1
      - name: Generate Score
        run: |
          poetry run python3 data_pipeline/application.py score-full-run
      - name: Upload Score to AWS
        run: |
          aws s3 sync ./data_pipeline/data/score/csv/ s3://justice40-data/data-pipeline/data/score/csv --acl public-read --delete
      - name: Generate Score Post
        run: |
          poetry run python3 data_pipeline/application.py generate-score-post -s aws
      - name: Upload Score Post to AWS
        run: |
          aws s3 sync ./data_pipeline/data/score/csv/ s3://justice40-data/data-pipeline/data/score/csv --acl public-read --delete
          aws s3 sync ./data_pipeline/data/score/downloadable/ s3://justice40-data/data-pipeline/data/score/downloadable --acl public-read --delete
          aws s3 cp ./data_pipeline/files/ s3://justice40-data/data-pipeline/data/score/downloadable --acl public-read --recursive


@@ -1,31 +0,0 @@
name: Refresh Backend CDN
on:
  workflow_dispatch:
    inputs:
      confirm-action:
        description: This will invalidate the backend CDN, are you sure you want to proceed? (Y/n)
        default: n
        required: true
  # to allow this action to run every 6 hours
  # schedule:
  #   - cron: "0 */6 * * *"
env:
  BE_CDN_ID: E1324VDMNCO97N
jobs:
  invalidate-cdn:
    runs-on: ubuntu-20.04
    steps:
      - name: Configure AWS Credentials
        uses: aws-actions/configure-aws-credentials@v1
        with:
          aws-access-key-id: ${{ secrets.DATA_DEV_AWS_ACCESS_KEY_ID }}
          aws-secret-access-key: ${{ secrets.DATA_DEV_AWS_SECRET_ACCESS_KEY }}
          aws-region: us-east-1
      - name: Invalidate cache on AWS CDNs
        uses: chetan/invalidate-cloudfront-action@master
        env:
          DISTRIBUTION: ${{env.BE_CDN_ID}}
          PATHS: "/*"
          AWS_REGION: "us-east-1"
          AWS_ACCESS_KEY_ID: ${{ secrets.DATA_DEV_AWS_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.DATA_DEV_AWS_SECRET_ACCESS_KEY }}


@@ -1,86 +0,0 @@
name: Tribal Layer Deploy
on:
  workflow_dispatch:
    inputs:
      confirm-action:
        description: This will deploy tribal map layer, are you sure you want to proceed? (Y/n)
        default: n
        required: true
env:
  BE_CDN_ID: E1324VDMNCO97N
jobs:
  deploy_data:
    runs-on: ubuntu-latest
    defaults:
      run:
        working-directory: data/data-pipeline
    strategy:
      matrix:
        python-version: [3.9]
    steps:
      - name: Checkout source
        uses: actions/checkout@v2
      - name: Print variables to help debug
        uses: hmarr/debug-action@v2
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v2
        with:
          python-version: ${{ matrix.python-version }}
      - name: Load cached Poetry installation
        uses: actions/cache@v2
        id: cached-poetry-dependencies
        with:
          path: ~/.cache/pypoetry/virtualenvs
          key: env-${{ runner.os }}-${{ matrix.python-version }}-${{ hashFiles('**/poetry.lock') }}-${{ hashFiles('.github/workflows/combine-tilefy.yml') }}
      - name: Install poetry
        uses: snok/install-poetry@v1
      - name: Print Poetry settings
        run: poetry show -v
      - name: Install dependencies
        run: poetry add s4cmd && poetry install
        if: steps.cached-poetry-dependencies.outputs.cache-hit != 'true'
      - name: Install GDAL/ogr2ogr
        run: |
          sudo add-apt-repository ppa:ubuntugis/ppa
          sudo apt-get update
          sudo apt-get -y install gdal-bin
          ogrinfo --version
      - name: Set timezone for tippecanoe
        uses: szenius/set-timezone@v1.0
        with:
          timezoneLinux: "America/Los_Angeles"
      - name: Get tippecanoe
        run: |
          sudo apt-get install -y software-properties-common libsqlite3-dev zlib1g-dev
          sudo apt-add-repository -y ppa:git-core/ppa
          sudo mkdir -p /tmp/tippecanoe-src
          sudo git clone https://github.com/mapbox/tippecanoe.git /tmp/tippecanoe-src
      - name: Make tippecanoe
        working-directory: /tmp/tippecanoe-src
        run: |
          sudo /usr/bin/bash -c make
          mkdir -p /usr/local/bin
          cp tippecanoe /usr/local/bin/tippecanoe
          tippecanoe -v
      - name: Run Scripts
        run: |
          poetry run python3 data_pipeline/application.py etl-run --dataset tribal
          poetry run python3 data_pipeline/application.py generate-map-tiles --generate-tribal-layer
      - name: Configure AWS Credentials
        uses: aws-actions/configure-aws-credentials@v1
        with:
          aws-access-key-id: ${{ secrets.DATA_DEV_AWS_ACCESS_KEY_ID }}
          aws-secret-access-key: ${{ secrets.DATA_DEV_AWS_SECRET_ACCESS_KEY }}
          aws-region: us-east-1
      - name: Deploy to Geoplatform AWS
        run: |
          poetry run s4cmd put ./data_pipeline/data/tribal/geojson/ s3://justice40-data/data-pipeline/data/tribal/geojson --recursive --force --API-ACL=public-read --num-threads=250
          poetry run s4cmd put ./data_pipeline/data/tribal/tiles/ s3://justice40-data/data-pipeline/data/tribal/tiles --recursive --force --API-ACL=public-read --num-threads=250
      - name: Invalidate cache on AWS CDNs
        uses: chetan/invalidate-cloudfront-action@master
        env:
          DISTRIBUTION: ${{env.BE_CDN_ID}}
          PATHS: "/*"
          AWS_REGION: "us-east-1"
          AWS_ACCESS_KEY_ID: ${{ secrets.DATA_DEV_AWS_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.DATA_DEV_AWS_SECRET_ACCESS_KEY }}