diff --git a/.github/workflows/closed_fe_pr.yml b/.github/workflows/closed_fe_pr.yml
new file mode 100644
index 00000000..5cbcf80f
--- /dev/null
+++ b/.github/workflows/closed_fe_pr.yml
@@ -0,0 +1,20 @@
+name: Closed Frontend PR
+on:
+  pull_request:
+    types: [closed]
+    paths:
+      - "client/**/*"
+env:
+  PR_NUMBER: ${{ github.event.number }}
+jobs:
+  remove-artifacts:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Configure AWS Credentials
+        uses: aws-actions/configure-aws-credentials@v1
+        with:
+          aws-access-key-id: ${{ secrets.CLIENT_DEV_AWS_ACCESS_KEY_ID }}
+          aws-secret-access-key: ${{ secrets.CLIENT_DEV_AWS_SECRET_ACCESS_KEY }}
+          aws-region: us-east-1
+      - name: Remove staging artifacts
+        run: aws s3 rm s3://usds-geoplatform-justice40-website/justice40-tool/ --exclude "*" --include "$(echo $PR_NUMBER)-*" --recursive
diff --git a/.github/workflows/closed_pr.yml b/.github/workflows/closed_pr.yml
deleted file mode 100644
index 1867e72a..00000000
--- a/.github/workflows/closed_pr.yml
+++ /dev/null
@@ -1,20 +0,0 @@
-name: Closed PR
-on:
-  pull_request:
-    types: [ closed ]
-    paths:
-      - "client/**/*"
-env:
-  PR_NUMBER: ${{ github.event.number }}
-jobs:
-  remove-artifacts:
-    runs-on: ubuntu-latest
-    steps:
-      - name: Configure AWS Credentials
-        uses: aws-actions/configure-aws-credentials@v1
-        with:
-          aws-access-key-id: ${{ secrets.CLIENT_DEV_AWS_ACCESS_KEY_ID }}
-          aws-secret-access-key: ${{ secrets.CLIENT_DEV_AWS_SECRET_ACCESS_KEY }}
-          aws-region: us-east-1
-      - name: Remove staging artifacts
-        run: aws s3 rm s3://usds-geoplatform-justice40-website/justice40-tool/ --exclude "*" --include "$(echo $PR_NUMBER)-*" --recursive
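The `aws s3 rm` invocation above relies on AWS CLI filter ordering: every key matches by default, `--exclude "*"` then drops everything, and the later `--include` re-adds only keys prefixed with this PR's number, since later filters take precedence. A minimal sketch of the same call, assuming a hypothetical PR number (123) and adding `--dryrun` so it only previews the deletions:

```sh
PR_NUMBER=123  # hypothetical stand-in; the workflow takes this from github.event.number
aws s3 rm s3://usds-geoplatform-justice40-website/justice40-tool/ \
  --recursive --dryrun \
  --exclude "*" --include "${PR_NUMBER}-*"
```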
diff --git a/.github/workflows/deploy_be_staging.yml b/.github/workflows/deploy_be_staging.yml
new file mode 100644
index 00000000..b4e7f2ef
--- /dev/null
+++ b/.github/workflows/deploy_be_staging.yml
@@ -0,0 +1,89 @@
+name: Deploy Backend staging
+on:
+  pull_request:
+    branches: [main]
+    paths:
+      - "data/*"
+env:
+  PR_NUMBER: ${{github.event.pull_request.number}}
+  SHA_NUMBER: ${{github.event.pull_request.head.sha}}
+jobs:
+  generate-score-tiles:
+    runs-on: ubuntu-latest
+    defaults:
+      run:
+        working-directory: data/data-pipeline
+    strategy:
+      matrix:
+        python-version: [3.9]
+    steps:
+      - name: Checkout source
+        uses: actions/checkout@v2
+      - name: Print variables to help debug
+        uses: hmarr/debug-action@v2
+      - name: Set up Python ${{ matrix.python-version }}
+        uses: actions/setup-python@v2
+        with:
+          python-version: ${{ matrix.python-version }}
+      - name: Setup Poetry
+        uses: Gr1N/setup-poetry@v7
+      - name: Print poetry version
+        run: poetry --version
+      - name: Install dependencies
+        run: poetry install
+      - name: Configure AWS Credentials
+        uses: aws-actions/configure-aws-credentials@v1
+        with:
+          aws-access-key-id: ${{ secrets.DATA_DEV_AWS_ACCESS_KEY_ID }}
+          aws-secret-access-key: ${{ secrets.DATA_DEV_AWS_SECRET_ACCESS_KEY }}
+          aws-region: us-east-1
+      - name: Generate Score
+        run: |
+          poetry run python3 data_pipeline/application.py score-full-run
+      - name: Generate Score Post
+        run: |
+          poetry run python3 data_pipeline/application.py generate-score-post -s aws
+      - name: Generate Score Geo
+        run: |
+          poetry run python3 data_pipeline/application.py geo-score
+      - name: Install GDAL/ogr2ogr
+        run: |
+          sudo add-apt-repository ppa:ubuntugis/ppa
+          sudo apt-get -y install gdal-bin
+          ogrinfo --version
+      - name: Set timezone for tippecanoe
+        uses: szenius/set-timezone@v1.0
+        with:
+          timezoneLinux: "America/Los_Angeles"
+      - name: Get tippecanoe
+        run: |
+          sudo apt-get install -y software-properties-common libsqlite3-dev zlib1g-dev
+          sudo apt-add-repository -y ppa:git-core/ppa
+          sudo mkdir -p /tmp/tippecanoe-src
+          sudo git clone https://github.com/mapbox/tippecanoe.git /tmp/tippecanoe-src
+      - name: Make tippecanoe
+        working-directory: /tmp/tippecanoe-src
+        run: |
+          sudo /usr/bin/bash -c make
+          mkdir -p /usr/local/bin
+          cp tippecanoe /usr/local/bin/tippecanoe
+          tippecanoe -v
+      - name: Generate Tiles
+        run: |
+          poetry run python3 data_pipeline/application.py generate-map-tiles
+      - name: Deploy to Geoplatform AWS
+        run: |
+          aws s3 sync ./data_pipeline/data/score/csv/ s3://justice40-data/data-pipeline-staging/${{env.PR_NUMBER}}/${{env.SHA_NUMBER}}/data/score/csv --acl public-read --delete
+          aws s3 sync ./data_pipeline/data/score/downloadable/ s3://justice40-data/data-pipeline-staging/${{env.PR_NUMBER}}/${{env.SHA_NUMBER}}/data/score/downloadable --acl public-read --delete
+          aws s3 sync ./data_pipeline/data/score/geojson/ s3://justice40-data/data-pipeline-staging/${{env.PR_NUMBER}}/${{env.SHA_NUMBER}}/data/score/geojson --acl public-read --delete
+          aws s3 cp ./data_pipeline/data/score/tiles/ s3://justice40-data/data-pipeline-staging/${{env.PR_NUMBER}}/${{env.SHA_NUMBER}}/data/score/tiles --recursive --acl public-read
+      - name: Update PR with deployed URL
+        uses: mshick/add-pr-comment@v1
+        with:
+          # Deploy to S3 for the staging URL
+          message: |
+            **🚢 PR Deployed! 🚢**
+            Find it here: http://usds-geoplatform-justice40-website.s3-website-us-east-1.amazonaws.com/justice40-tool/${{env.PR_NUMBER}}/en/cejst/ !
+          repo-token: ${{ secrets.GITHUB_TOKEN }}
+          repo-token-user-login: "github-actions[bot]" # The user.login for temporary GitHub tokens
+          allow-repeats: false # This is the default
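The deploy step mirrors each pipeline output into a per-PR, per-commit prefix, so every staging run is addressable as `data-pipeline-staging/<PR>/<SHA>/data/score/...` (`aws s3 sync --delete` uploads new or changed files and removes remote-only ones, so the prefix exactly reflects the run that produced it). A sketch for inspecting what one run published, assuming hypothetical PR number and commit SHA values:

```sh
PR_NUMBER=1234      # hypothetical
SHA_NUMBER=0f3a9c1  # hypothetical
aws s3 ls --recursive \
  "s3://justice40-data/data-pipeline-staging/${PR_NUMBER}/${SHA_NUMBER}/data/score/"
```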
diff --git a/.github/workflows/deploy_main.yml b/.github/workflows/deploy_fe_main.yml
similarity index 92%
rename from .github/workflows/deploy_main.yml
rename to .github/workflows/deploy_fe_main.yml
index 051942e6..776339dc 100644
--- a/.github/workflows/deploy_main.yml
+++ b/.github/workflows/deploy_fe_main.yml
@@ -1,15 +1,12 @@
-# This workflow will do a clean install of node dependencies, build the source code and run tests across different versions of node
-# For more information see: https://help.github.com/actions/language-and-framework-guides/using-nodejs-with-github-actions
-
-name: Deploy prod
+name: Deploy Frontend Prod
 on:
   push:
     branches: [main]
     paths:
       - "client/**/*"
-env: 
+env:
   PR_NUMBER: ${{ github.event.number }}
-  WEB_CDN_ID: E27WCOZZ03KIBX 
+  WEB_CDN_ID: E27WCOZZ03KIBX
 jobs:
   build:
     runs-on: ubuntu-latest
@@ -33,7 +30,7 @@ jobs:
       - name: Build
         run: npm run build --if-present
         env:
-          # See the client readme for more info on environment variables: 
+          # See the client readme for more info on environment variables:
           # https://github.com/usds/justice40-tool/blob/main/client/README.md
           DATA_SOURCE: cdn
           # TODO: Update main URL when either is back up
diff --git a/.github/workflows/deploy_staging.yml b/.github/workflows/deploy_fe_staging.yml
similarity index 91%
rename from .github/workflows/deploy_staging.yml
rename to .github/workflows/deploy_fe_staging.yml
index 904653ee..54b7121d 100644
--- a/.github/workflows/deploy_staging.yml
+++ b/.github/workflows/deploy_fe_staging.yml
@@ -1,13 +1,10 @@
-# This workflow will do a clean install of node dependencies, build the source code and run tests across different versions of node
-# For more information see: https://help.github.com/actions/language-and-framework-guides/using-nodejs-with-github-actions
-
-name: Deploy stage
+name: Deploy Frontend staging
 on:
   pull_request:
     branches: [main]
     paths:
       - "client/**/*"
-env: 
+env:
   PR_NUMBER: ${{github.event.pull_request.number}}
   WORKING_DIRECTORY: ./client
 jobs:
@@ -33,7 +30,7 @@ jobs:
       - name: Build
         run: npm run build --if-present
         env:
-          # See the client readme for more info on environment variables: 
+          # See the client readme for more info on environment variables:
           # https://github.com/usds/justice40-tool/blob/main/client/README.md
           DATA_SOURCE: cdn
           SITE_URL: "${{ secrets.STAGE_SITE_URL }}"
@@ -55,7 +52,7 @@ jobs:
         with:
           working-directory: ${{env.WORKING_DIRECTORY}}
           start: npm start
-          wait-on: 'http://localhost:8000'
+          wait-on: "http://localhost:8000"
       - name: Upload Artifact
         uses: actions/upload-artifact@v2
         with:
diff --git a/.github/workflows/generate-score.yml b/.github/workflows/generate-score.yml
deleted file mode 100644
index d24442e3..00000000
--- a/.github/workflows/generate-score.yml
+++ /dev/null
@@ -1,52 +0,0 @@
-name: Generate Score
-on:
-  workflow_dispatch:
-    inputs:
-      confirm-action:
-        description: This will rebuild the data sources and regenerate the score, are you sure you want to proceed? (Y/n)
-        default: n
-        required: true
-
-jobs:
-  deploy_data:
-    runs-on: ubuntu-latest
-    defaults:
-      run:
-        working-directory: data/data-pipeline
-    strategy:
-      matrix:
-        python-version: [3.9]
-    steps:
-      - name: Checkout source
-        uses: actions/checkout@v2
-      - name: Print variables to help debug
-        uses: hmarr/debug-action@v2
-      - name: Set up Python ${{ matrix.python-version }}
-        uses: actions/setup-python@v2
-        with:
-          python-version: ${{ matrix.python-version }}
-      - name: Setup Poetry
-        uses: Gr1N/setup-poetry@v7
-      - name: Print poetry version
-        run: poetry --version
-      - name: Install dependencies
-        run: poetry install
-      - name: Configure AWS Credentials
-        uses: aws-actions/configure-aws-credentials@v1
-        with:
-          aws-access-key-id: ${{ secrets.DATA_DEV_AWS_ACCESS_KEY_ID }}
-          aws-secret-access-key: ${{ secrets.DATA_DEV_AWS_SECRET_ACCESS_KEY }}
-          aws-region: us-east-1
-      - name: Generate Score
-        run: |
-          poetry run python3 data_pipeline/application.py score-full-run
-      - name: Upload Score to AWS
-        run: |
-          aws s3 sync ./data_pipeline/data/score/csv/ s3://justice40-data/data-pipeline/data/score/csv --acl public-read --delete
-      - name: Generate Score Post
-        run: |
-          poetry run python3 data_pipeline/application.py generate-score-post -s aws
-      - name: Upload Score Post to AWS
-        run: |
-          aws s3 sync ./data_pipeline/data/score/csv/ s3://justice40-data/data-pipeline/data/score/csv --acl public-read --delete
-          aws s3 sync ./data_pipeline/data/score/downloadable/ s3://justice40-data/data-pipeline/data/score/downloadable --acl public-read --delete
diff --git a/data/data-pipeline/data_pipeline/etl/base.py b/data/data-pipeline/data_pipeline/etl/base.py
index be1dd86a..67a61f0a 100644
--- a/data/data-pipeline/data_pipeline/etl/base.py
+++ b/data/data-pipeline/data_pipeline/etl/base.py
@@ -104,7 +104,7 @@ class ExtractTransformLoad:
         # of the instance which is often a child class.
         tmp_path = self.DATA_PATH / "tmp" / str(self.__class__.__name__)
 
-        # Create directory if it doesn't exist
+        # Create directory if it doesn't exist already
         tmp_path.mkdir(parents=True, exist_ok=True)
 
         return tmp_path
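The `base.py` change only touches a comment, but the surrounding pattern is worth noting: each ETL subclass gets its own scratch directory keyed by its class name, so different ETL runs do not clobber each other's temporary files. A minimal self-contained sketch of that pattern, assuming a stand-in `DATA_PATH` and a hypothetical subclass name (the real class wires these up elsewhere in the pipeline):

```python
from pathlib import Path


class ExtractTransformLoad:
    # Stand-in for the real class attribute configured elsewhere in the pipeline.
    DATA_PATH = Path("data")

    def get_tmp_path(self) -> Path:
        # self.__class__ resolves to the child class at runtime, so every
        # ETL subclass writes under its own data/tmp/<ClassName> directory.
        tmp_path = self.DATA_PATH / "tmp" / str(self.__class__.__name__)

        # Create directory if it doesn't exist already
        tmp_path.mkdir(parents=True, exist_ok=True)

        return tmp_path


class ExampleETL(ExtractTransformLoad):  # hypothetical subclass for illustration
    pass


print(ExampleETL().get_tmp_path())  # data/tmp/ExampleETL
```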