# Builds the score and map tiles from the data pipeline and deploys them to AWS.
name: Deploy Backend Main

on:
  push:
    branches: [main]
    paths:
      - "data/**"
      - ".github/workflows/deploy_backend_main.yml"

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

env:
  CENSUS_API_KEY: ${{ secrets.CENSUS_API_KEY }}
  J40_VERSION_LABEL_STRING: ${{ vars.SCORE_VERSION }}

jobs:
  generate-score-tiles:
    runs-on: ubuntu-latest
    defaults:
      run:
        working-directory: data/data-pipeline
    strategy:
      matrix:
        python-version: ['3.10']
    environment: Staging
    steps:
      - name: Checkout source
        uses: actions/checkout@v4

      - name: Print variables to help debug
        uses: hmarr/debug-action@v3

      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python-version }}

      # Cache the Poetry virtualenvs, keyed on the lockfile and this workflow file.
      - name: Load cached Poetry installation
        id: cached-poetry-dependencies
        uses: actions/cache@v4
        with:
          path: ~/.cache/pypoetry/virtualenvs
          key: env-${{ runner.os }}-${{ matrix.python-version }}-${{ hashFiles('**/poetry.lock') }}-${{ hashFiles('.github/workflows/deploy_backend_main.yml') }}

      - name: Install poetry
        uses: snok/install-poetry@v1

      - name: Print Poetry settings
        run: poetry show -v

      # s4cmd is added here because the S3 upload steps below depend on it.
      - name: Install dependencies
        if: steps.cached-poetry-dependencies.outputs.cache-hit != 'true'
        run: poetry add s4cmd && poetry install

      - name: Configure AWS Credentials
        uses: aws-actions/configure-aws-credentials@v4
        with:
          aws-access-key-id: ${{ secrets.DATA_DEV_AWS_ACCESS_KEY_ID }}
          aws-secret-access-key: ${{ secrets.DATA_DEV_AWS_SECRET_ACCESS_KEY }}
          aws-region: us-east-1

      - name: Cleanup Data
        run: |
          poetry run python3 -m data_pipeline.application data-cleanup

      # The census download is slow, so cache it across runs.
      - name: Cache Census Data
        id: cache-census
        uses: actions/cache@v4
        with:
          path: data/data-pipeline/data_pipeline/data/census
          key: data-census

      - name: Get Census Data
        if: steps.cache-census.outputs.cache-hit != 'true'
        run: |
          poetry run python3 -m data_pipeline.application census-data-download

      - name: Run ETL
        run: |
          poetry run python3 -m data_pipeline.application etl-run

      - name: Generate Score
        run: |
          poetry run python3 -m data_pipeline.application score-run

      - name: Score Compare
        run: |
          poetry run python3 -m data_pipeline.comparator compare-score

      - name: Generate Score Post
        run: |
          poetry run python3 -m data_pipeline.application generate-score-post

      # Sanity-check the downloadable files for the expected score version.
      - name: Confirm we generated the version of the score we think we did
        if: ${{ env.J40_VERSION_LABEL_STRING == '2.0' || env.J40_VERSION_LABEL_STRING == 'beta' }}
        run: |
          grep -v "Identified as disadvantaged due to tribal overlap" data_pipeline/data/score/downloadable/* > /dev/null

      - name: Generate Score Geo
        run: |
          poetry run python3 -m data_pipeline.application geo-score

      - name: Set timezone for tippecanoe
        uses: szenius/set-timezone@v2.0
        with:
          timezoneLinux: "America/Los_Angeles"

      # Build tippecanoe (the map tile generator) from source.
      - name: Get tippecanoe
        run: |
          sudo apt-get install -y software-properties-common libsqlite3-dev zlib1g-dev
          sudo apt-add-repository -y ppa:git-core/ppa
          sudo mkdir -p /tmp/tippecanoe-src
          sudo git clone https://github.com/mapbox/tippecanoe.git /tmp/tippecanoe-src

      - name: Make tippecanoe
        working-directory: /tmp/tippecanoe-src
        run: |
          sudo /usr/bin/bash -c make
          mkdir -p /usr/local/bin
          cp tippecanoe /usr/local/bin/tippecanoe
          tippecanoe -v

      - name: Generate Tiles
        run: |
          poetry run python3 -m data_pipeline.application generate-map-tiles
          poetry run python3 -m data_pipeline.application generate-map-tiles --generate-tribal-layer

      # Upload the score, downloadable files, and tribal data to S3.
      - name: Deploy Score and Map to Geoplatform AWS
        if: ${{ env.J40_VERSION_LABEL_STRING == '2.0' }}
        run: |
          poetry run s4cmd put ./data_pipeline/data/score/* s3://${{secrets.S3_DATA_BUCKET}}/data-versions/${{env.J40_VERSION_LABEL_STRING}}/data/score/ --recursive --force --API-ACL=public-read --num-threads=250
          poetry run s4cmd put ./data_pipeline/files/* s3://${{secrets.S3_DATA_BUCKET}}/data-versions/${{env.J40_VERSION_LABEL_STRING}}/data/score/downloadable/ --recursive --force --API-ACL=public-read
          poetry run s4cmd put ./data_pipeline/data/tribal/* s3://${{secrets.S3_DATA_BUCKET}}/data-versions/${{env.J40_VERSION_LABEL_STRING}}/data/tribal/ --recursive --force --API-ACL=public-read --num-threads=250

      # Confirm the deployed downloadable files are reachable before invalidating the CDN.
      - name: 2.0 Post-deploy Score Check
        run: |
          curl "${{secrets.DATA_URL}}/data-versions/2.0/data/score/downloadable/2.0-data-documentation.zip" -s -f -I -o /dev/null && \
          curl "${{secrets.DATA_URL}}/data-versions/2.0/data/score/downloadable/2.0-shapefile-codebook.zip" -s -f -I -o /dev/null && \
          curl "${{secrets.DATA_URL}}/data-versions/2.0/data/score/downloadable/2.0-communities.xlsx" -s -f -I -o /dev/null && \
          curl "${{secrets.DATA_URL}}/data-versions/2.0/data/score/downloadable/2.0-communities.csv" -s -f -I -o /dev/null && \
          curl "${{secrets.DATA_URL}}/data-versions/2.0/data/score/downloadable/cejst-technical-support-document.pdf" -s -f -I -o /dev/null && \
          curl "${{secrets.DATA_URL}}/data-versions/2.0/data/score/downloadable/draft-communities-list.pdf" -s -f -I -o /dev/null

      - name: Invalidate cache on AWS CDN
        uses: chetan/invalidate-cloudfront-action@master
        env:
          DISTRIBUTION: ${{secrets.DATA_CDN_ID}}
          PATHS: "/*"
          AWS_REGION: "us-east-1"
          AWS_ACCESS_KEY_ID: ${{ secrets.DATA_DEV_AWS_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.DATA_DEV_AWS_SECRET_ACCESS_KEY }}
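
# A minimal local dry run of the same pipeline (a sketch, assuming a checkout of this
# repo with Poetry installed and a Census API key in the environment; these are the
# commands the steps above invoke, minus caching, tile deployment, and AWS access):
#
#   cd data/data-pipeline
#   export CENSUS_API_KEY=...   # the workflow reads this from secrets
#   poetry install
#   poetry run python3 -m data_pipeline.application data-cleanup
#   poetry run python3 -m data_pipeline.application census-data-download
#   poetry run python3 -m data_pipeline.application etl-run
#   poetry run python3 -m data_pipeline.application score-run
#   poetry run python3 -m data_pipeline.application generate-score-post
#   poetry run python3 -m data_pipeline.application geo-score
#   poetry run python3 -m data_pipeline.application generate-map-tiles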