mirror of https://github.com/DOI-DO/j40-cejst-2.git, synced 2025-02-22 01:31:25 -08:00
Multiple workflow fixes
commit a80be7082d (parent f8bd1acf26)
3 changed files with 32 additions and 28 deletions
.github/workflows/deploy_backend_main.yml (vendored), 50 lines changed
@@ -1,21 +1,12 @@
 name: Deploy Backend Main
 on:
   workflow_dispatch:
-    inputs:
-      score_version:
-        description: "Which version of the score are you generating?"
-        required: true
-        default: '2.0'
-        type: choice
-        options:
-        - beta
-        - 1.0
-        - 2.0
-        - test

   push:
     branches: [main]
     paths:
       - "data/**"
 env:
   CENSUS_API_KEY: ${{ secrets.CENSUS_API_KEY }}
-  J40_VERSION_LABEL_STRING: ${{ inputs.score_version }}
+  J40_VERSION_LABEL_STRING: ${{ vars.SCORE_VERSION }}

 jobs:
   generate-score-tiles:
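The hunk above drops the score_version workflow_dispatch input and instead reads the version label from a repository variable. Below is a minimal sketch of that pattern, assuming a repository variable named SCORE_VERSION has been defined under the repository's Actions variables; the workflow and job names are hypothetical and not part of this commit.

# Sketch: resolve the version label from a repository variable, as the
# updated env block does with vars.SCORE_VERSION.
name: score-version-demo          # hypothetical workflow name
on: workflow_dispatch
env:
  J40_VERSION_LABEL_STRING: ${{ vars.SCORE_VERSION }}  # set under Settings > Secrets and variables > Actions > Variables
jobs:
  show-label:                     # hypothetical job name
    runs-on: ubuntu-latest
    steps:
      - name: Print the resolved version label
        run: echo "score version is ${J40_VERSION_LABEL_STRING}"

Unlike a workflow_dispatch input, a repository variable is also available to push-triggered runs, which matters here because the workflow still fires on pushes to main that touch data/**.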
@@ -81,28 +72,31 @@ jobs:
         run: |
           poetry run python3 data_pipeline/application.py geo-score
       - name: Run smoketest for 1.0
-        if: ${{ env.J40_VERSION_LABEL_STRING == '1.0' || env.J40_VERSION_LABEL_STRING == 'test' }}
+        if: ${{ env.J40_VERSION_LABEL_STRING == '1.0' }}
         run: |
           poetry run pytest data_pipeline/ -m smoketest
       - name: Deploy Score to Geoplatform AWS
         run: |
-          poetry run s4cmd put ./data_pipeline/data/score/csv/ s3://${{secrets.S3_DATA_BUCKET}}/data-versions/${{env.J40_VERSION_LABEL_STRING}}/data/score/csv --recursive --force
-          poetry run s4cmd put ./data_pipeline/files/ s3://${{secrets.S3_DATA_BUCKET}}/data-versions/${{env.J40_VERSION_LABEL_STRING}}/data/score/downloadable --recursive --force
-          poetry run s4cmd put ./data_pipeline/data/score/downloadable/ s3://${{secrets.S3_DATA_BUCKET}}/data-versions/${{env.J40_VERSION_LABEL_STRING}}/data/score/downloadable --recursive --force
+          poetry run s4cmd put ./data_pipeline/data/score/csv/ s3://${{secrets.S3_DATA_BUCKET}}/data-versions/${{env.J40_VERSION_LABEL_STRING}}/data/score/csv --sync-check --recursive --force
+          poetry run s4cmd put ./data_pipeline/files/ s3://${{secrets.S3_DATA_BUCKET}}/data-versions/${{env.J40_VERSION_LABEL_STRING}}/data/score/downloadable --sync-check --recursive --force
+          poetry run s4cmd put ./data_pipeline/data/score/downloadable/ s3://${{secrets.S3_DATA_BUCKET}}/data-versions/${{env.J40_VERSION_LABEL_STRING}}/data/score/downloadable --sync-check --recursive --force
       - name: Deploy 1.0 score post
-        if: ${{ env.J40_VERSION_LABEL_STRING == '1.0' || env.J40_VERSION_LABEL_STRING == 'test' }}
+        if: ${{ env.J40_VERSION_LABEL_STRING == '1.0' }}
         run: |
           curl "${{secrets.DATA_URL}}/data-versions/1.0/data/score/downloadable/1.0-shapefile-codebook.zip" -s -f -I -o /dev/null && \
           curl "${{secrets.DATA_URL}}/data-versions/1.0/data/score/downloadable/1.0-communities.xlsx" -s -f -I -o /dev/null && \
           curl "${{secrets.DATA_URL}}/data-versions/1.0/data/score/downloadable/1.0-communities.csv" -s -f -I -o /dev/null && \
           curl "${{secrets.DATA_URL}}/data-versions/1.0/data/score/downloadable/1.0-shapefile-codebook.zip" -s -f -I -o /dev/null && \
           curl "${{secrets.DATA_URL}}/data-versions/1.0/data/score/downloadable/cejst-technical-support-document.pdf" -s -f -I -o /dev/null && \
           curl "${{secrets.DATA_URL}}/data-versions/1.0/data/score/downloadable/draft-communities-list.pdf" -s -f -I -o /dev/null
       - name: Deploy 2.0 score post
-        if: ${{ env.J40_VERSION_LABEL_STRING == '2.0' || env.J40_VERSION_LABEL_STRING == 'beta' }}
+        if: ${{ env.J40_VERSION_LABEL_STRING == '2.0' }}
         run: |
           curl "${{secrets.DATA_URL}}/data-versions/2.0/data/score/downloadable/2.0-data-documentation.zip" -s -f -I -o /dev/null && \
-          curl "${{secrets.DATA_URL}}/data-versions/2.0/data/score/downloadable/2.0-shapefile-codebook.zip" -s -f -I -o /dev/null
+          curl "${{secrets.DATA_URL}}/data-versions/2.0/data/score/downloadable/2.0-communities.xlsx" -s -f -I -o /dev/null && \
+          curl "${{secrets.DATA_URL}}/data-versions/2.0/data/score/downloadable/2.0-communities.csv" -s -f -I -o /dev/null && \
+          curl "${{secrets.DATA_URL}}/data-versions/2.0/data/score/downloadable/cejst-technical-support-document.pdf" -s -f -I -o /dev/null && \
+          curl "${{secrets.DATA_URL}}/data-versions/2.0/data/score/downloadable/draft-communities-list.pdf" -s -f -I -o /dev/null
       - name: Set timezone for tippecanoe
         uses: szenius/set-timezone@v2.0
         with:
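In the hunk above, the s4cmd uploads gain --sync-check and the version-gated if: conditions are simplified. A step-level sketch of that combination follows; it is not the project's step, and example-bucket stands in for the real secrets.S3_DATA_BUCKET value.

      # Sketch: gate a step on the resolved label and upload with sync checking.
      - name: Upload score CSVs (sketch)
        if: ${{ env.J40_VERSION_LABEL_STRING == '2.0' }}
        run: |
          # --sync-check compares size/md5 against what is already in the bucket
          # and skips unchanged files, so re-runs transfer less.
          poetry run s4cmd put ./data_pipeline/data/score/csv/ \
            s3://example-bucket/data-versions/${{ env.J40_VERSION_LABEL_STRING }}/data/score/csv \
            --sync-check --recursive --force

The curl chains in the same hunk use -f -I -o /dev/null, so a missing published file (a failing HEAD request) fails the step and the && chain stops at the first broken URL.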
@@ -125,6 +119,14 @@ jobs:
           poetry run python3 data_pipeline/application.py generate-map-tiles
       - name: Deploy Map to Geoplatform AWS
         run: |
-          poetry run s4cmd put ./data_pipeline/data/score/geojson/ s3://${{secrets.S3_DATA_BUCKET}}/data-versions/${{env.J40_VERSION_LABEL_STRING}}/data/score/geojson --recursive --force --num-threads=250
-          poetry run s4cmd put ./data_pipeline/data/score/shapefile/ s3://${{secrets.S3_DATA_BUCKET}}/data-versions/${{env.J40_VERSION_LABEL_STRING}}/data/score/shapefile --recursive --force
-          poetry run s4cmd put ./data_pipeline/data/score/tiles/ s3://${{secrets.S3_DATA_BUCKET}}/data-versions/${{env.J40_VERSION_LABEL_STRING}}/data/score/tiles --recursive --force --num-threads=250
+          poetry run s4cmd put ./data_pipeline/data/score/geojson/ s3://${{secrets.S3_DATA_BUCKET}}/data-versions/${{env.J40_VERSION_LABEL_STRING}}/data/score/geojson --sync-check --recursive --force --delete-removed --num-threads=250
+          poetry run s4cmd put ./data_pipeline/data/score/shapefile/ s3://${{secrets.S3_DATA_BUCKET}}/data-versions/${{env.J40_VERSION_LABEL_STRING}}/data/score/shapefile --sync-check --recursive --force --delete-removed
+          poetry run s4cmd put ./data_pipeline/data/score/tiles/ s3://${{secrets.S3_DATA_BUCKET}}/data-versions/${{env.J40_VERSION_LABEL_STRING}}/data/score/tiles --sync-check --recursive --force --delete-removed --num-threads=250
+      - name: Invalidate cache on AWS CDN
+        uses: chetan/invalidate-cloudfront-action@master
+        env:
+          DISTRIBUTION: ${{secrets.DATA_CDN_ID}}
+          PATHS: "/*"
+          AWS_REGION: "us-east-1"
+          AWS_ACCESS_KEY_ID: ${{ secrets.CLIENT_DEV_AWS_ACCESS_KEY_ID }}
+          AWS_SECRET_ACCESS_KEY: ${{ secrets.CLIENT_DEV_AWS_SECRET_ACCESS_KEY }}
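The map-deploy hunk adds --sync-check and --delete-removed to the tile uploads and introduces a CloudFront invalidation step. A sketch of the resulting publish-then-invalidate pair is below; it reuses the action and secret names from the diff, but it is a fragment (checkout, Python, and poetry setup steps are omitted) rather than a runnable job, and example-bucket is a placeholder.

      - name: Sync tiles to S3 (sketch)
        run: |
          # --delete-removed also deletes objects under the prefix that no longer
          # exist locally, so the bucket mirrors the freshly generated tileset.
          poetry run s4cmd put ./data_pipeline/data/score/tiles/ \
            s3://example-bucket/data-versions/${{ env.J40_VERSION_LABEL_STRING }}/data/score/tiles \
            --sync-check --recursive --force --delete-removed --num-threads=250
      - name: Invalidate cache on AWS CDN
        uses: chetan/invalidate-cloudfront-action@master
        env:
          DISTRIBUTION: ${{ secrets.DATA_CDN_ID }}   # CloudFront distribution ID
          PATHS: "/*"                                # invalidate everything served by the distribution
          AWS_REGION: "us-east-1"
          AWS_ACCESS_KEY_ID: ${{ secrets.CLIENT_DEV_AWS_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.CLIENT_DEV_AWS_SECRET_ACCESS_KEY }}

Without the invalidation, CloudFront could keep serving the previous tiles from cache even though the objects in S3 have been replaced.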
.github/workflows/deploy_frontend_main.yml (vendored), 6 lines changed
@@ -15,7 +15,7 @@ jobs:
       matrix:
         node-version: [14.x]
     steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v4
       - name: Use Node.js ${{ matrix.node-version }}
         uses: actions/setup-node@v2
         with:
@@ -58,7 +58,7 @@ jobs:
     environment: Staging
     steps:
       - name: Checkout source
-        uses: actions/checkout@v2
+        uses: actions/checkout@v4
       - name: Download Artifacts
         uses: actions/download-artifact@v4
         with:
@@ -68,7 +68,7 @@ jobs:
         run: |
           echo "DESTINATION_FOLDER=main" >> $GITHUB_ENV
       - name: Configure AWS Credentials
-        uses: aws-actions/configure-aws-credentials@v1
+        uses: aws-actions/configure-aws-credentials@v4
         with:
           aws-access-key-id: ${{ secrets.CLIENT_DEV_AWS_ACCESS_KEY_ID }}
           aws-secret-access-key: ${{ secrets.CLIENT_DEV_AWS_SECRET_ACCESS_KEY }}
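All three frontend changes are action version bumps: actions/checkout moves from v2 to v4 and aws-actions/configure-aws-credentials from v1 to v4, replacing older majors that target Node.js runtimes GitHub has since deprecated. A minimal sketch of the upgraded pair follows; aws-region is an assumed value, since the region is not shown in the lines of this diff.

      - uses: actions/checkout@v4
      - name: Configure AWS Credentials
        uses: aws-actions/configure-aws-credentials@v4
        with:
          aws-access-key-id: ${{ secrets.CLIENT_DEV_AWS_ACCESS_KEY_ID }}
          aws-secret-access-key: ${{ secrets.CLIENT_DEV_AWS_SECRET_ACCESS_KEY }}
          aws-region: us-east-1   # assumed region for this sketch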
.github/workflows/markdown-link-check.yml (vendored), 4 lines changed
@@ -11,4 +11,6 @@ jobs:
     steps:
       - uses: actions/checkout@v4
       - name: Run linkspector
-        uses: umbrelladocs/action-linkspector@v1
+        uses: umbrelladocs/action-linkspector@v1
+        with:
+          fail_on_error: true
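The resulting link-check job is small enough to sketch in full. In the sketch below the workflow name and trigger are assumptions; the checkout and linkspector steps and the fail_on_error input come straight from the diff.

name: markdown-link-check        # assumed name, matching the file name
on: pull_request                 # assumed trigger
jobs:
  linkspector:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - name: Run linkspector
        uses: umbrelladocs/action-linkspector@v1
        with:
          fail_on_error: true    # report broken links as a job failure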