Merge pull request #35 from agilesix/cfelix/merge-v2-20241210

CEQ-J40 merge v2 code - 20241210
Neil Martinsen-Burrell 2024-12-10 15:30:20 -06:00 committed by GitHub
commit b4543ded6f
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
46 changed files with 6399 additions and 668 deletions


@ -60,12 +60,10 @@ jobs:
- name: Get Census Data - name: Get Census Data
run: | run: |
poetry run python3 -m data_pipeline.application census-data-download poetry run python3 -m data_pipeline.application census-data-download
- name: Extract Data Sources
run: |
poetry run python3 -m data_pipeline.application extract-data-sources
- name: Run ETL - name: Run ETL
run: | run: |
poetry run python3 -m data_pipeline.application etl-run poetry run python3 -m data_pipeline.application etl-run
poetry run python3 -m data_pipeline.application etl-run --dataset tribal
- name: Generate Score - name: Generate Score
run: | run: |
poetry run python3 -m data_pipeline.application score-run poetry run python3 -m data_pipeline.application score-run
@ -75,10 +73,6 @@ jobs:
- name: Generate Score Post - name: Generate Score Post
run: | run: |
poetry run python3 -m data_pipeline.application generate-score-post poetry run python3 -m data_pipeline.application generate-score-post
- name: Confirm we generated the version of the score we think we did
if: ${{ env.J40_VERSION_LABEL_STRING == '1.0' || env.J40_VERSION_LABEL_STRING == 'test' }}
run: |
grep "Identified as disadvantaged due to tribal overlap" data_pipeline/data/score/downloadable/* > /dev/null
- name: Confirm we generated the version of the score we think we did - name: Confirm we generated the version of the score we think we did
if: ${{ env.J40_VERSION_LABEL_STRING == '2.0' || env.J40_VERSION_LABEL_STRING == 'beta' }} if: ${{ env.J40_VERSION_LABEL_STRING == '2.0' || env.J40_VERSION_LABEL_STRING == 'beta' }}
run: | run: |
@ -86,33 +80,6 @@ jobs:
- name: Generate Score Geo - name: Generate Score Geo
run: | run: |
poetry run python3 -m data_pipeline.application geo-score poetry run python3 -m data_pipeline.application geo-score
- name: Run smoketest for 1.0
if: ${{ env.J40_VERSION_LABEL_STRING == '1.0' }}
run: |
poetry run pytest data_pipeline/ -m smoketest
- name: Deploy Score to Geoplatform AWS
run: |
poetry run s4cmd put ./data_pipeline/data/score/csv/ s3://${{secrets.S3_DATA_BUCKET}}/data-versions/${{env.J40_VERSION_LABEL_STRING}}/data/score/csv --sync-check --recursive --force
poetry run s4cmd put ./data_pipeline/files/ s3://${{secrets.S3_DATA_BUCKET}}/data-versions/${{env.J40_VERSION_LABEL_STRING}}/data/score/downloadable --sync-check --recursive --force
poetry run s4cmd put ./data_pipeline/data/score/downloadable/ s3://${{secrets.S3_DATA_BUCKET}}/data-versions/${{env.J40_VERSION_LABEL_STRING}}/data/score/downloadable --sync-check --recursive --force
poetry run s4cmd put ./data_pipeline/data/score/search/ s3://${{secrets.S3_DATA_BUCKET}}/data-versions/${{env.J40_VERSION_LABEL_STRING}}/data/score/search --sync-check --recursive --force
- name: Deploy 1.0 score post
if: ${{ env.J40_VERSION_LABEL_STRING == '1.0' }}
run: |
curl "${{secrets.DATA_URL}}/data-versions/1.0/data/score/downloadable/1.0-shapefile-codebook.zip" -s -f -I -o /dev/null && \
curl "${{secrets.DATA_URL}}/data-versions/1.0/data/score/downloadable/1.0-communities.xlsx" -s -f -I -o /dev/null && \
curl "${{secrets.DATA_URL}}/data-versions/1.0/data/score/downloadable/1.0-communities.csv" -s -f -I -o /dev/null && \
curl "${{secrets.DATA_URL}}/data-versions/1.0/data/score/downloadable/cejst-technical-support-document.pdf" -s -f -I -o /dev/null && \
curl "${{secrets.DATA_URL}}/data-versions/1.0/data/score/downloadable/draft-communities-list.pdf" -s -f -I -o /dev/null
- name: Deploy 2.0 score post
if: ${{ env.J40_VERSION_LABEL_STRING == '2.0' }}
run: |
curl "${{secrets.DATA_URL}}/data-versions/2.0/data/score/downloadable/2.0-data-documentation.zip" -s -f -I -o /dev/null && \
curl "${{secrets.DATA_URL}}/data-versions/2.0/data/score/downloadable/2.0-shapefile-codebook.zip" -s -f -I -o /dev/null
curl "${{secrets.DATA_URL}}/data-versions/2.0/data/score/downloadable/2.0-communities.xlsx" -s -f -I -o /dev/null && \
curl "${{secrets.DATA_URL}}/data-versions/2.0/data/score/downloadable/2.0-communities.csv" -s -f -I -o /dev/null && \
curl "${{secrets.DATA_URL}}/data-versions/2.0/data/score/downloadable/cejst-technical-support-document.pdf" -s -f -I -o /dev/null && \
curl "${{secrets.DATA_URL}}/data-versions/2.0/data/score/downloadable/draft-communities-list.pdf" -s -f -I -o /dev/null
- name: Set timezone for tippecanoe - name: Set timezone for tippecanoe
uses: szenius/set-timezone@v2.0 uses: szenius/set-timezone@v2.0
with: with:
@ -133,16 +100,26 @@ jobs:
- name: Generate Tiles - name: Generate Tiles
run: | run: |
poetry run python3 -m data_pipeline.application generate-map-tiles poetry run python3 -m data_pipeline.application generate-map-tiles
- name: Deploy Map to Geoplatform AWS poetry run python3 -m data_pipeline.application generate-map-tiles --generate-tribal-layer
- name: Deploy Score and Map to Geoplatform AWS
if: ${{ env.J40_VERSION_LABEL_STRING == '2.0' }}
run: | run: |
poetry run s4cmd put ./data_pipeline/data/score/geojson/ s3://${{secrets.S3_DATA_BUCKET}}/data-versions/${{env.J40_VERSION_LABEL_STRING}}/data/score/geojson --sync-check --recursive --force --delete-removed --num-threads=250 poetry run s4cmd put ./data_pipeline/data/score/* s3://${{secrets.S3_DATA_BUCKET}}/data-versions/${{env.J40_VERSION_LABEL_STRING}}/data/score/ --sync-check --recursive --force --API-ACL=public-read --num-threads=250
poetry run s4cmd put ./data_pipeline/data/score/shapefile/ s3://${{secrets.S3_DATA_BUCKET}}/data-versions/${{env.J40_VERSION_LABEL_STRING}}/data/score/shapefile --sync-check --recursive --force --delete-removed poetry run s4cmd put ./data_pipeline/files/* s3://${{secrets.S3_DATA_BUCKET}}/data-versions/${{env.J40_VERSION_LABEL_STRING}}/data/score/downloadable/ --sync-check --API-ACL=public-read --recursive --force
poetry run s4cmd put ./data_pipeline/data/score/tiles/ s3://${{secrets.S3_DATA_BUCKET}}/data-versions/${{env.J40_VERSION_LABEL_STRING}}/data/score/tiles --sync-check --recursive --force --delete-removed --num-threads=250 poetry run s4cmd put ./data_pipeline/data/tribal/* s3://${{secrets.S3_DATA_BUCKET}}/data-versions/${{env.J40_VERSION_LABEL_STRING}}/data/tribal/ --recursive --force --API-ACL=public-read --num-threads=250
- name: 2.0 Post-deploy Score Check
run: |
curl "${{secrets.DATA_URL}}/data-versions/2.0/data/score/downloadable/2.0-data-documentation.zip" -s -f -I -o /dev/null && \
curl "${{secrets.DATA_URL}}/data-versions/2.0/data/score/downloadable/2.0-shapefile-codebook.zip" -s -f -I -o /dev/null
curl "${{secrets.DATA_URL}}/data-versions/2.0/data/score/downloadable/2.0-communities.xlsx" -s -f -I -o /dev/null && \
curl "${{secrets.DATA_URL}}/data-versions/2.0/data/score/downloadable/2.0-communities.csv" -s -f -I -o /dev/null && \
curl "${{secrets.DATA_URL}}/data-versions/2.0/data/score/downloadable/cejst-technical-support-document.pdf" -s -f -I -o /dev/null && \
curl "${{secrets.DATA_URL}}/data-versions/2.0/data/score/downloadable/draft-communities-list.pdf" -s -f -I -o /dev/null
- name: Invalidate cache on AWS CDN - name: Invalidate cache on AWS CDN
uses: chetan/invalidate-cloudfront-action@master uses: chetan/invalidate-cloudfront-action@master
env: env:
DISTRIBUTION: ${{secrets.DATA_CDN_ID}} DISTRIBUTION: ${{secrets.DATA_CDN_ID}}
PATHS: "/*" PATHS: "/*"
AWS_REGION: "us-east-1" AWS_REGION: "us-east-1"
AWS_ACCESS_KEY_ID: ${{ secrets.CLIENT_DEV_AWS_ACCESS_KEY_ID }} AWS_ACCESS_KEY_ID: ${{ secrets.DATA_DEV_AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.CLIENT_DEV_AWS_SECRET_ACCESS_KEY }} AWS_SECRET_ACCESS_KEY: ${{ secrets.DATA_DEV_AWS_SECRET_ACCESS_KEY }}
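For context, the post-deploy check in this workflow only issues HEAD requests against each published 2.0 download and fails the job if any are missing. A minimal TypeScript sketch of the same idea; the base URL is a placeholder for the DATA_URL secret and is not taken from the workflow:

```typescript
// Sketch: confirm the published 2.0 artifacts answer HEAD requests (Node 18+ global fetch).
const BASE = "https://example-data-cdn"; // placeholder for the DATA_URL secret
const paths = [
  "data-versions/2.0/data/score/downloadable/2.0-data-documentation.zip",
  "data-versions/2.0/data/score/downloadable/2.0-shapefile-codebook.zip",
  "data-versions/2.0/data/score/downloadable/2.0-communities.xlsx",
  "data-versions/2.0/data/score/downloadable/2.0-communities.csv",
  "data-versions/2.0/data/score/downloadable/cejst-technical-support-document.pdf",
  "data-versions/2.0/data/score/downloadable/draft-communities-list.pdf",
];

async function checkDownloadables(): Promise<void> {
  for (const path of paths) {
    const res = await fetch(`${BASE}/${path}`, {method: "HEAD"});
    if (!res.ok) {
      throw new Error(`Missing or unreachable artifact: ${path} (HTTP ${res.status})`);
    }
  }
}

checkDownloadables().then(() => console.log("All 2.0 downloadables are reachable."));
```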


@ -4,7 +4,9 @@ on:
concurrency: concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number }} group: ${{ github.workflow }}-${{ github.event.pull_request.number }}
cancel-in-progress: true cancel-in-progress: true
env:
python-version: '3.10'
J40_VERSION_LABEL_STRING: ${{ vars.SCORE_VERSION }}
jobs: jobs:
# JOB to run change detection # JOB to run change detection
detect-be-changes: detect-be-changes:
@ -26,32 +28,29 @@ jobs:
- 'data/**' - 'data/**'
- '.github/workflows/pr_backend.yml' - '.github/workflows/pr_backend.yml'
code-quality-checks: code-quality-checks:
name: Code quality checks and tests - ${{ matrix.python-version }} name: Code quality checks and tests
needs: detect-be-changes needs: detect-be-changes
if: ${{ needs.detect-be-changes.outputs.backend == 'true' }} if: ${{ needs.detect-be-changes.outputs.backend == 'true' }}
runs-on: ubuntu-latest runs-on: ubuntu-latest
defaults: defaults:
run: run:
working-directory: data/data-pipeline working-directory: data/data-pipeline
strategy:
matrix:
python-version: ['3.10']
environment: PR environment: PR
steps: steps:
- name: Checkout source - name: Checkout source
uses: actions/checkout@v4 uses: actions/checkout@v4
- name: Print variables to help debug - name: Print variables to help debug
uses: hmarr/debug-action@v3 uses: hmarr/debug-action@v3
- name: Set up Python ${{ matrix.python-version }} - name: Set up Python ${{ env.python-version }}
uses: actions/setup-python@v5 uses: actions/setup-python@v5
with: with:
python-version: ${{ matrix.python-version }} python-version: ${{ env.python-version }}
- name: Load cached Poetry installation - name: Load cached Poetry installation
id: cached-poetry-dependencies id: cached-poetry-dependencies
uses: actions/cache@v4 uses: actions/cache@v4
with: with:
path: ~/.cache/pypoetry/virtualenvs path: ~/.cache/pypoetry/virtualenvs
key: env-${{ runner.os }}-${{ matrix.python-version }}-${{ hashFiles('**/poetry.lock') }}-${{ hashFiles('.github/workflows/deploy_backend_main.yml') }} key: env-${{ runner.os }}-${{ env.python-version }}-${{ hashFiles('**/poetry.lock') }}-${{ hashFiles('.github/workflows/deploy_backend_main.yml') }}
- name: Install poetry - name: Install poetry
uses: snok/install-poetry@v1 uses: snok/install-poetry@v1
- name: Install dependencies - name: Install dependencies
@ -69,32 +68,29 @@ jobs:
run: | run: |
poetry run pytest data_pipeline/ poetry run pytest data_pipeline/
generate-score-tiles: generate-score-tiles:
name: Score and tile generation - ${{ matrix.python-version }} name: Score and tile generation
needs: detect-be-changes needs: detect-be-changes
if: ${{ needs.detect-be-changes.outputs.backend == 'true' }} if: ${{ needs.detect-be-changes.outputs.backend == 'true' }}
runs-on: ubuntu-latest runs-on: ubuntu-latest
defaults: defaults:
run: run:
working-directory: data/data-pipeline working-directory: data/data-pipeline
strategy:
matrix:
python-version: ['3.10']
environment: PR environment: PR
steps: steps:
- name: Checkout source - name: Checkout source
uses: actions/checkout@v4 uses: actions/checkout@v4
- name: Print variables to help debug - name: Print variables to help debug
uses: hmarr/debug-action@v3 uses: hmarr/debug-action@v3
- name: Set up Python ${{ matrix.python-version }} - name: Set up Python ${{ env.python-version }}
uses: actions/setup-python@v5 uses: actions/setup-python@v5
with: with:
python-version: ${{ matrix.python-version }} python-version: ${{ env.python-version }}
- name: Load cached Poetry installation - name: Load cached Poetry installation
id: cached-poetry-dependencies id: cached-poetry-dependencies
uses: actions/cache@v4 uses: actions/cache@v4
with: with:
path: ~/.cache/pypoetry/virtualenvs path: ~/.cache/pypoetry/virtualenvs
key: env-${{ runner.os }}-${{ matrix.python-version }}-${{ hashFiles('**/poetry.lock') }}-${{ hashFiles('.github/workflows/deploy_backend_main.yml') }} key: env-${{ runner.os }}-${{ env.python-version }}-${{ hashFiles('**/poetry.lock') }}-${{ hashFiles('.github/workflows/deploy_backend_main.yml') }}
- name: Install poetry - name: Install poetry
uses: snok/install-poetry@v1 uses: snok/install-poetry@v1
- name: Print Poetry settings - name: Print Poetry settings
@ -113,12 +109,10 @@ jobs:
- name: Get Census Data - name: Get Census Data
run: | run: |
poetry run python3 -m data_pipeline.application census-data-download poetry run python3 -m data_pipeline.application census-data-download
- name: Extract Data Sources
run: |
poetry run python3 -m data_pipeline.application extract-data-sources
- name: Run ETL - name: Run ETL
run: | run: |
poetry run python3 -m data_pipeline.application etl-run poetry run python3 -m data_pipeline.application etl-run
poetry run python3 -m data_pipeline.application etl-run --dataset tribal
- name: Generate Score - name: Generate Score
run: | run: |
poetry run python3 -m data_pipeline.application score-run poetry run python3 -m data_pipeline.application score-run
@ -128,10 +122,6 @@ jobs:
- name: Generate Score Post - name: Generate Score Post
run: | run: |
poetry run python3 -m data_pipeline.application generate-score-post poetry run python3 -m data_pipeline.application generate-score-post
- name: Confirm we generated the version of the score we think we did
if: ${{ env.J40_VERSION_LABEL_STRING == '1.0' || env.J40_VERSION_LABEL_STRING == 'test' }}
run: |
grep "Identified as disadvantaged due to tribal overlap" data_pipeline/data/score/downloadable/* > /dev/null
- name: Confirm we generated the version of the score we think we did - name: Confirm we generated the version of the score we think we did
if: ${{ env.J40_VERSION_LABEL_STRING == '2.0' || env.J40_VERSION_LABEL_STRING == 'beta' }} if: ${{ env.J40_VERSION_LABEL_STRING == '2.0' || env.J40_VERSION_LABEL_STRING == 'beta' }}
run: | run: |
@ -163,4 +153,5 @@ jobs:
- name: Generate Tiles - name: Generate Tiles
run: | run: |
poetry run python3 -m data_pipeline.application generate-map-tiles poetry run python3 -m data_pipeline.application generate-map-tiles
poetry run python3 -m data_pipeline.application generate-map-tiles --generate-tribal-layer


@ -4,6 +4,8 @@ on:
concurrency: concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number }} group: ${{ github.workflow }}-${{ github.event.pull_request.number }}
cancel-in-progress: true cancel-in-progress: true
env:
node-version: 14.x
jobs: jobs:
# JOB to run change detection # JOB to run change detection
detect-fe-changes: detect-fe-changes:
@ -25,7 +27,7 @@ jobs:
- 'client/**' - 'client/**'
- '.github/workflows/pr_frontend.yml' - '.github/workflows/pr_frontend.yml'
frontend-build: frontend-build:
name: Frontend build - ${{ matrix.node-version }} name: Frontend build
needs: detect-fe-changes needs: detect-fe-changes
if: ${{ needs.detect-fe-changes.outputs.frontend == 'true' }} if: ${{ needs.detect-fe-changes.outputs.frontend == 'true' }}
runs-on: ubuntu-latest runs-on: ubuntu-latest
@ -33,15 +35,12 @@ jobs:
defaults: defaults:
run: run:
working-directory: client working-directory: client
strategy:
matrix:
node-version: [14.x]
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v4
- name: Use Node.js ${{ matrix.node-version }} - name: Use Node.js ${{ env.node-version }}
uses: actions/setup-node@v2 uses: actions/setup-node@v2
with: with:
node-version: ${{ matrix.node-version }} node-version: ${{ env.node-version }}
- name: Install - name: Install
run: npm ci run: npm ci
- name: Build - name: Build


@ -1,19 +1,23 @@
# WARNING: # WARNING:
# THIS FILE IS CHECKED INTO VERSION CONTROL! DO NOT ADD ANY SECRET INFO. IF SECRETS ARE ADDED, PLEASE ADD TO # GIT IGNORE FILE # THIS FILE IS CHECKED INTO VERSION CONTROL! DO NOT ADD ANY SECRET INFO. IF SECRETS ARE ADDED, PLEASE ADD TO # GIT IGNORE FILE
#
# Create an adjacent local .env file to override any settings
#
# Feature Tiles env variables: # Feature Tiles env variables:
# The TILES_BASE_URL will be determined by the DATA_SOURCE env variable # The TILES_BASE_URL will be determined by the DATA_SOURCE env variable
GATSBY_CDN_TILES_BASE_URL=https://dig0wsohit6js.cloudfront.net GATSBY_CDN_TILES_BASE_URL=https://static-data-screeningtool.geoplatform.gov
GATSBY_LOCAL_TILES_BASE_URL=http://localhost:5000/data/data-pipeline GATSBY_LOCAL_TILES_BASE_URL=http://localhost:5000/data/data-pipeline
GATSBY_DATA_PIPELINE_SCORE_PATH_LOCAL=data_pipeline/data/score GATSBY_DATA_PIPELINE_SCORE_PATH_LOCAL=data_pipeline/data/score
GATSBY_DATA_PIPELINE_TRIBAL_PATH=data-pipeline/data/tribal GATSBY_DATA_PIPELINE_TRIBAL_PATH_LOCAL=data_pipeline/data/tribal
GATSBY_2_0_TRIBAL_PATH=data-versions/2.0/data/tribal
GATSBY_BETA_SCORE_PATH = data-versions/beta/data/score GATSBY_BETA_SCORE_PATH=data-versions/beta/data/score
GATSBY_2_0_SCORE_PATH = data-versions/2.0/data/score GATSBY_2_0_SCORE_PATH=data-versions/2.0/data/score
GATSBY_DATA_PIPELINE_SEARCH_PATH_LOCAL = data_pipeline/data/score/search/tracts.json GATSBY_DATA_PIPELINE_SEARCH_PATH_LOCAL=data_pipeline/data/score/search/tracts.json
GATSBY_2_0_MAP_TRACT_SEARCH_PATH = data-versions/2.0/data/score/search/tracts.json GATSBY_2_0_MAP_TRACT_SEARCH_PATH=data-versions/2.0/data/score/search/tracts.json
GATSBY_FILE_DL_PATH_BETA_COMMUNITIES_LIST_XLS=downloadable/beta-communities.xlsx GATSBY_FILE_DL_PATH_BETA_COMMUNITIES_LIST_XLS=downloadable/beta-communities.xlsx
GATSBY_FILE_DL_PATH_BETA_COMMUNITIES_LIST_CSV=downloadable/beta-communities.csv GATSBY_FILE_DL_PATH_BETA_COMMUNITIES_LIST_CSV=downloadable/beta-communities.csv


@ -1,16 +1,19 @@
# WARNING: # WARNING:
# THIS FILE IS CHECKED INTO VERSION CONTROL! DO NOT ADD ANY SECRET INFO. IF SECRETS ARE ADDED, PLEASE ADD TO # GIT IGNORE FILE # THIS FILE IS CHECKED INTO VERSION CONTROL! DO NOT ADD ANY SECRET INFO. IF SECRETS ARE ADDED, PLEASE ADD TO # GIT IGNORE FILE
#
# Create an adjacent local .env file to override any settings
#
# Feature Tiles env variables: # Feature Tiles env variables:
# The TILES_BASE_URL will always point to the CDN # The TILES_BASE_URL will always point to the CDN
GATSBY_CDN_TILES_BASE_URL=https://dig0wsohit6js.cloudfront.net GATSBY_CDN_TILES_BASE_URL=https://static-data-screeningtool.geoplatform.gov
GATSBY_DATA_PIPELINE_TRIBAL_PATH=data-pipeline/data/tribal GATSBY_2_0_TRIBAL_PATH=data-versions/2.0/data/tribal
GATSBY_BETA_SCORE_PATH = data-versions/beta/data/score GATSBY_BETA_SCORE_PATH=data-versions/beta/data/score
GATSBY_2_0_SCORE_PATH = data-versions/2.0/data/score GATSBY_2_0_SCORE_PATH=data-versions/2.0/data/score
GATSBY_2_0_MAP_TRACT_SEARCH_PATH = data-versions/2.0/data/score/search/tracts.json GATSBY_2_0_MAP_TRACT_SEARCH_PATH=data-versions/2.0/data/score/search/tracts.json
GATSBY_FILE_DL_PATH_BETA_COMMUNITIES_LIST_XLS=downloadable/beta-communities.xlsx GATSBY_FILE_DL_PATH_BETA_COMMUNITIES_LIST_XLS=downloadable/beta-communities.xlsx
GATSBY_FILE_DL_PATH_BETA_COMMUNITIES_LIST_CSV=downloadable/beta-communities.csv GATSBY_FILE_DL_PATH_BETA_COMMUNITIES_LIST_CSV=downloadable/beta-communities.csv


@ -1,7 +1,7 @@
[![Staging](https://github.com/usds/justice40-tool/actions/workflows/deploy_fe_staging.yml/badge.svg)](https://github.com/usds/justice40-tool/actions/workflows/deploy_fe_staging.yml) [![Staging](https://github.com/usds/justice40-tool/actions/workflows/deploy_fe_staging.yml/badge.svg)](https://github.com/usds/justice40-tool/actions/workflows/deploy_fe_staging.yml)
[![Production](https://github.com/usds/justice40-tool/actions/workflows/deploy_fe_main.yml/badge.svg)](https://github.com/usds/justice40-tool/actions/workflows/deploy_fe_main.yml) [![Production](https://github.com/usds/justice40-tool/actions/workflows/deploy_fe_main.yml/badge.svg)](https://github.com/usds/justice40-tool/actions/workflows/deploy_fe_main.yml)
# Justice40 Clientss # Justice40 Client
This README contains the following content: This README contains the following content:


@ -1,13 +1,16 @@
require('dotenv').config({ const dotenv = require('dotenv');
// NODE_ENV is automatically set to
// 'development' when the app is launched via 'npm start' or 'npm develop'
// 'production' when the app is launched via 'npm build'
// Depending on the node environment, the app will then use // load .env first so any local settings take precedence over environmental defaults loaded next
// .env.production or .env.development for application dotenv.config();
// env variables.
path: `.env.${process.env.NODE_ENV}`, // NODE_ENV is automatically set to
}); // 'development' when the app is launched via 'npm start' or 'npm develop'
// 'production' when the app is launched via 'npm build'
// Depending on the node environment, the app will then use
// .env.production or .env.development for application
// env variables.
dotenv.config({path: `.env.${process.env.NODE_ENV}`});
module.exports = { module.exports = {
siteMetadata: { siteMetadata: {


@ -1,6 +1,10 @@
path = require('path'); path = require('path');
// https://github.com/maplibre/maplibre-gl-js/issues/83#issuecomment-877012839 // This is require to use react-map-gl 6.x with maplibre
// See: https://github.com/visgl/react-map-gl/blob/v6.1.21/docs/get-started/get-started.md#using-with-a-mapbox-gl-fork
//
// In react-map-gl 7.x this is no longer needed: https://visgl.github.io/react-map-gl/docs/get-started
//
exports.onCreateWebpackConfig = ({stage, loaders, actions}) => { exports.onCreateWebpackConfig = ({stage, loaders, actions}) => {
actions.setWebpackConfig({ actions.setWebpackConfig({
devtool: 'eval-source-map', devtool: 'eval-source-map',


@ -3203,6 +3203,12 @@
"resolved": "https://registry.npmjs.org/@types/js-cookie/-/js-cookie-2.2.7.tgz", "resolved": "https://registry.npmjs.org/@types/js-cookie/-/js-cookie-2.2.7.tgz",
"integrity": "sha512-aLkWa0C0vO5b4Sr798E26QgOkss68Un0bLjs7u9qxzPT5CG+8DuNTffWES58YzJs3hrVAOs1wonycqEBqNJubA==" "integrity": "sha512-aLkWa0C0vO5b4Sr798E26QgOkss68Un0bLjs7u9qxzPT5CG+8DuNTffWES58YzJs3hrVAOs1wonycqEBqNJubA=="
}, },
"@types/js-search": {
"version": "1.4.4",
"resolved": "https://registry.npmjs.org/@types/js-search/-/js-search-1.4.4.tgz",
"integrity": "sha512-NYIBuSRTi2h6nLne0Ygx78BZaiT/q0lLU7YSkjOrDJWpSx6BioIZA/i2GZ+WmMUzEQs2VNIWcXRRAqisrG3ZNA==",
"dev": true
},
"@types/json-patch": { "@types/json-patch": {
"version": "0.0.30", "version": "0.0.30",
"resolved": "https://registry.npmjs.org/@types/json-patch/-/json-patch-0.0.30.tgz", "resolved": "https://registry.npmjs.org/@types/json-patch/-/json-patch-0.0.30.tgz",
@ -14087,36 +14093,6 @@
"object-visit": "^1.0.0" "object-visit": "^1.0.0"
} }
}, },
"mapbox-gl": {
"version": "1.13.2",
"resolved": "https://registry.npmjs.org/mapbox-gl/-/mapbox-gl-1.13.2.tgz",
"integrity": "sha512-CPjtWygL+f7naL+sGHoC2JQR0DG7u+9ik6WdkjjVmz2uy0kBC2l+aKfdi3ZzUR7VKSQJ6Mc/CeCN+6iVNah+ww==",
"requires": {
"@mapbox/geojson-rewind": "^0.5.0",
"@mapbox/geojson-types": "^1.0.2",
"@mapbox/jsonlint-lines-primitives": "^2.0.2",
"@mapbox/mapbox-gl-supported": "^1.5.0",
"@mapbox/point-geometry": "^0.1.0",
"@mapbox/tiny-sdf": "^1.1.1",
"@mapbox/unitbezier": "^0.0.0",
"@mapbox/vector-tile": "^1.3.1",
"@mapbox/whoots-js": "^3.1.0",
"csscolorparser": "~1.0.3",
"earcut": "^2.2.2",
"geojson-vt": "^3.2.1",
"gl-matrix": "^3.2.1",
"grid-index": "^1.1.0",
"minimist": "^1.2.5",
"murmurhash-js": "^1.0.0",
"pbf": "^3.2.1",
"potpack": "^1.0.1",
"quickselect": "^2.0.0",
"rw": "^1.3.3",
"supercluster": "^7.1.0",
"tinyqueue": "^2.0.3",
"vt-pbf": "^3.1.1"
}
},
"maplibre-gl": { "maplibre-gl": {
"version": "1.14.0", "version": "1.14.0",
"resolved": "https://registry.npmjs.org/maplibre-gl/-/maplibre-gl-1.14.0.tgz", "resolved": "https://registry.npmjs.org/maplibre-gl/-/maplibre-gl-1.14.0.tgz",


@ -39,6 +39,7 @@
"@testing-library/react": "^11.2.7", "@testing-library/react": "^11.2.7",
"@types/d3-ease": "^3.0.0", "@types/d3-ease": "^3.0.0",
"@types/jest": "^26.0.24", "@types/jest": "^26.0.24",
"@types/js-search": "^1.4.4",
"@types/maplibre-gl": "^1.14.0", "@types/maplibre-gl": "^1.14.0",
"@types/node": "^15.14.9", "@types/node": "^15.14.9",
"@types/react": "^17.0.41", "@types/react": "^17.0.41",
@ -86,7 +87,6 @@
"gatsby-plugin-robots-txt": "^1.7.0", "gatsby-plugin-robots-txt": "^1.7.0",
"gatsby-plugin-sitemap": "^4.10.0", "gatsby-plugin-sitemap": "^4.10.0",
"js-search": "^2.0.1", "js-search": "^2.0.1",
"mapbox-gl": "^1.13.2",
"maplibre-gl": "^1.14.0", "maplibre-gl": "^1.14.0",
"query-string": "^7.1.3", "query-string": "^7.1.3",
"react": "^17.0.2", "react": "^17.0.2",


@ -19,7 +19,7 @@ exports[`rendering of the DatasetContainer checks if various text fields are vis
data-testid="grid" data-testid="grid"
> >
<h2> <h2>
Datasets used in v1.0 methodology Datasets used in v2.0 methodology
</h2> </h2>
</div> </div>
</div> </div>


@ -14,7 +14,7 @@ import SurveyButton from '../SurveyButton';
// @ts-ignore // @ts-ignore
import whitehouseIcon from '../../images/eop-seal.svg'; import whitehouseIcon from '../../images/eop-seal.svg';
import * as COMMON_COPY from '../../data/copy/common'; import * as COMMON_COPY from '../../data/copy/common';
import {GITHUB_LINK, GITHUB_LINK_ES} from '../../../src/data/copy/about'; import {GITHUB_LINK, GITHUB_LINK_ES} from '../../constants';
const J40Footer = () => { const J40Footer = () => {
const intl = useIntl(); const intl = useIntl();


@ -161,7 +161,7 @@ exports[`J40Footer renders correctly 1`] = `
<a <a
class="usa-link usa-link--external footer-link-first-child" class="usa-link usa-link--external footer-link-first-child"
data-cy="check-out-the-code-on-github" data-cy="check-out-the-code-on-github"
href="https://github.com/usds/justice40-tool" href="https://github.com/DOI-DO/ceq-j40-cejst-2"
rel="noreferrer" rel="noreferrer"
target="_blank" target="_blank"
> >


@ -2,7 +2,7 @@
/* eslint-disable no-unused-vars */ /* eslint-disable no-unused-vars */
// External Libs: // External Libs:
import React, {useRef, useState} from 'react'; import React, {useRef, useState} from 'react';
import {Map, MapboxGeoJSONFeature, LngLatBoundsLike} from 'maplibre-gl'; import {Map, MapGeoJSONFeature, LngLatBoundsLike} from 'maplibre-gl';
import ReactMapGL, { import ReactMapGL, {
MapEvent, MapEvent,
ViewportProps, ViewportProps,
@ -28,9 +28,8 @@ import AreaDetail from './AreaDetail';
import MapInfoPanel from './mapInfoPanel'; import MapInfoPanel from './mapInfoPanel';
import MapSearch from './MapSearch'; import MapSearch from './MapSearch';
import MapTractLayers from './MapTractLayers/MapTractLayers'; import MapTractLayers from './MapTractLayers/MapTractLayers';
// import MapTribalLayer from './MapTribalLayers/MapTribalLayers'; import MapTribalLayer from './MapTribalLayers/MapTribalLayers';
import TerritoryFocusControl from './territoryFocusControl'; import TerritoryFocusControl from './territoryFocusControl';
import {getOSBaseMap} from '../data/getOSBaseMap';
// Styles and constants // Styles and constants
import 'maplibre-gl/dist/maplibre-gl.css'; import 'maplibre-gl/dist/maplibre-gl.css';
@ -57,6 +56,13 @@ export interface IDetailViewInterface {
properties: constants.J40Properties, properties: constants.J40Properties,
}; };
export interface IMapFeature {
id: string;
geometry: any;
properties: any;
type: string;
}
const J40Map = ({location}: IJ40Interface) => { const J40Map = ({location}: IJ40Interface) => {
/** /**
* Initializes the zoom, and the map's center point (lat, lng) via the URL hash #{z}/{lat}/{long} * Initializes the zoom, and the map's center point (lat, lng) via the URL hash #{z}/{lat}/{long}
@ -80,7 +86,7 @@ const J40Map = ({location}: IJ40Interface) => {
zoom: zoom && parseFloat(zoom) ? parseFloat(zoom) : constants.GLOBAL_MIN_ZOOM, zoom: zoom && parseFloat(zoom) ? parseFloat(zoom) : constants.GLOBAL_MIN_ZOOM,
}); });
const [selectedFeature, setSelectedFeature] = useState<MapboxGeoJSONFeature>(); const [selectedFeature, setSelectedFeature] = useState<MapGeoJSONFeature>();
const [detailViewData, setDetailViewData] = useState<IDetailViewInterface>(); const [detailViewData, setDetailViewData] = useState<IDetailViewInterface>();
const [transitionInProgress, setTransitionInProgress] = useState<boolean>(false); const [transitionInProgress, setTransitionInProgress] = useState<boolean>(false);
const [geolocationInProgress, setGeolocationInProgress] = useState<boolean>(false); const [geolocationInProgress, setGeolocationInProgress] = useState<boolean>(false);
@ -108,17 +114,60 @@ const J40Map = ({location}: IJ40Interface) => {
const zoomLatLngHash = mapRef.current?.getMap()._hash._getCurrentHash(); const zoomLatLngHash = mapRef.current?.getMap()._hash._getCurrentHash();
/** /**
* This function will return the bounding box of the current map. Comment in when needed. * Selects the provided feature on the map.
* { * @param feature the feature to select
* _ne: {lng:number, lat:number}
* _sw: {lng:number, lat:number}
* }
* @returns {LngLatBounds}
*/ */
// const getCurrentMapBoundingBox = () => { const selectFeatureOnMap = (feature: IMapFeature) => {
// return mapRef.current ? console.log('mapRef getBounds(): ', mapRef.current.getMap().getBounds()) : null; if (feature) {
// }; // Get the current selected feature's bounding box:
const [minLng, minLat, maxLng, maxLat] = bbox(feature);
// Set the selectedFeature ID
if (feature.id !== selectedFeatureId) {
setSelectedFeature(feature);
} else {
setSelectedFeature(undefined);
}
// Go to the newly selected feature (as long as it's not an Alaska Point)
goToPlace([
[minLng, minLat],
[maxLng, maxLat],
]);
/**
* The following logic is used for the popup for the fullscreen feature
*/
// Create a new viewport using the current viewport dimensions:
const newViewPort = new WebMercatorViewport({height: viewport.height!, width: viewport.width!});
// Fit the viewport to the new bounds and return a long, lat and zoom:
const {longitude, latitude, zoom} = newViewPort.fitBounds(
[
[minLng, minLat],
[maxLng, maxLat],
],
{
padding: 40,
},
);
// Save the popupInfo
const popupInfo = {
longitude: longitude,
latitude: latitude,
zoom: zoom,
properties: feature.properties,
};
// Update the DetailedView state variable with the new popupInfo object:
setDetailViewData(popupInfo);
/**
* End Fullscreen feature specific logic
*/
}
};
/** /**
* This onClick event handler will listen and handle clicks on the map. It will listen for clicks on the * This onClick event handler will listen and handle clicks on the map. It will listen for clicks on the
@ -174,58 +223,7 @@ const J40Map = ({location}: IJ40Interface) => {
// @ts-ignore // @ts-ignore
const feature = event.features && event.features[0]; const feature = event.features && event.features[0];
if (feature) { selectFeatureOnMap(feature);
// Get the current selected feature's bounding box:
const [minLng, minLat, maxLng, maxLat] = bbox(feature);
// Set the selectedFeature ID
if (feature.id !== selectedFeatureId) {
setSelectedFeature(feature);
} else {
setSelectedFeature(undefined);
}
// Go to the newly selected feature (as long as it's not an Alaska Point)
goToPlace([
[minLng, minLat],
[maxLng, maxLat],
]);
/**
* The following logic is used for the popup for the fullscreen feature
*/
// Create a new viewport using the current viewport dimnesions:
const newViewPort = new WebMercatorViewport({height: viewport.height!, width: viewport.width!});
// Fit the viewport to the new bounds and return a long, lat and zoom:
const {longitude, latitude, zoom} = newViewPort.fitBounds(
[
[minLng, minLat],
[maxLng, maxLat],
],
{
padding: 40,
},
);
// Save the popupInfo
const popupInfo = {
longitude: longitude,
latitude: latitude,
zoom: zoom,
properties: feature.properties,
};
// Update the DetailedView state variable with the new popupInfo object:
setDetailViewData(popupInfo);
/**
* End Fullscreen feature specific logic
*/
}
} }
}; };
@ -304,11 +302,6 @@ const J40Map = ({location}: IJ40Interface) => {
setGeolocationInProgress(true); setGeolocationInProgress(true);
}; };
const mapBoxBaseLayer = {
customColorsWithUpdatedTribal: `mapbox://styles/justice40/cl9g30qh7000p15l9cp1ftw16`,
streetsWithUpdatedTribal: `mapbox://styles/justice40/cl98rlidr002c14obpsvz6zzs`,
};
return ( return (
<> <>
@ -348,8 +341,11 @@ const J40Map = ({location}: IJ40Interface) => {
// ****** Map state props: ****** // ****** Map state props: ******
// http://visgl.github.io/react-map-gl/docs/api-reference/interactive-map#map-state // http://visgl.github.io/react-map-gl/docs/api-reference/interactive-map#map-state
{...viewport} {...viewport}
mapStyle={process.env.MAPBOX_STYLES_READ_TOKEN ? mapStyle={
mapBoxBaseLayer.customColorsWithUpdatedTribal : getOSBaseMap()} process.env.MAPBOX_STYLES_READ_TOKEN ?
'mapbox://styles/justice40/cl9g30qh7000p15l9cp1ftw16' :
'https://basemaps.cartocdn.com/gl/voyager-gl-style/style.json'
}
width="100%" width="100%"
// Ajusting this height with a conditional statement will not render the map on staging. // Ajusting this height with a conditional statement will not render the map on staging.
// The reason for this issue is unknown. Consider styling the parent container via SASS. // The reason for this issue is unknown. Consider styling the parent container via SASS.
@ -383,6 +379,13 @@ const J40Map = ({location}: IJ40Interface) => {
data-cy={'reactMapGL'} data-cy={'reactMapGL'}
> >
{ /* Tribal layer is baked into Mapbox source,
* only render here if we're not using that
**/
process.env.MAPBOX_STYLES_READ_TOKEN ||
<MapTribalLayer />
}
<MapTractLayers <MapTractLayers
selectedFeature={selectedFeature} selectedFeature={selectedFeature}
selectedFeatureId={selectedFeatureId} selectedFeatureId={selectedFeatureId}
@ -390,7 +393,8 @@ const J40Map = ({location}: IJ40Interface) => {
{/* This is the first overlayed row on the map: Search and Geolocation */} {/* This is the first overlayed row on the map: Search and Geolocation */}
<div className={styles.mapHeaderRow}> <div className={styles.mapHeaderRow}>
<MapSearch goToPlace={goToPlace}/> <MapSearch goToPlace={goToPlace} mapRef={mapRef} selectFeatureOnMap={selectFeatureOnMap}
selectedFeatureId={selectedFeatureId}/>
{/* Geolocate Icon */} {/* Geolocate Icon */}
<div className={styles.geolocateBox}> <div className={styles.geolocateBox}>
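The refactor above moves the click-handler logic into selectFeatureOnMap, which fits the selected feature's bounding box into the current viewport to derive the popup's center and zoom. A condensed sketch of that math, assuming the same @turf/bbox and react-map-gl WebMercatorViewport imports J40Map already uses:

```typescript
import bbox from "@turf/bbox";
import {WebMercatorViewport} from "react-map-gl";

// Sketch: compute a popup anchor (center + zoom) for a feature by fitting its
// bounding box into the current viewport, as selectFeatureOnMap does above.
function popupAnchorFor(feature: GeoJSON.Feature, width: number, height: number) {
  const [minLng, minLat, maxLng, maxLat] = bbox(feature);
  const viewport = new WebMercatorViewport({width, height});
  const {longitude, latitude, zoom} = viewport.fitBounds(
      [[minLng, minLat], [maxLng, maxLat]],
      {padding: 40},
  );
  return {longitude, latitude, zoom, properties: feature.properties};
}
```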


@ -4,6 +4,8 @@ import {LngLatBoundsLike} from 'maplibre-gl';
import {useIntl} from 'gatsby-plugin-intl'; import {useIntl} from 'gatsby-plugin-intl';
import {Search} from '@trussworks/react-uswds'; import {Search} from '@trussworks/react-uswds';
import {useWindowSize} from 'react-use'; import {useWindowSize} from 'react-use';
import {RefObject} from 'react';
import {MapRef} from 'react-map-gl';
import * as JsSearch from 'js-search'; import * as JsSearch from 'js-search';
import * as constants from '../../data/constants'; import * as constants from '../../data/constants';
@ -14,19 +16,18 @@ import * as EXPLORE_COPY from '../../data/copy/explore';
interface IMapSearch { interface IMapSearch {
goToPlace(bounds: LngLatBoundsLike):void; goToPlace(bounds: LngLatBoundsLike):void;
mapRef:RefObject<MapRef>;
selectFeatureOnMap: (feature: any) => void;
selectedFeatureId: string;
} }
interface ISearchResult { interface ISearchTractRecord {
addresstype: string; GEOID10: string;
lat: string; INTPTLAT10: string;
lon: string; INTPTLON10: string;
boundingbox: string[];
type: string;
// eslint-disable-next-line camelcase
place_rank: number;
} }
const MapSearch = ({goToPlace}:IMapSearch) => { const MapSearch = ({goToPlace, mapRef, selectFeatureOnMap, selectedFeatureId}:IMapSearch) => {
// State to hold if the search results are empty or not: // State to hold if the search results are empty or not:
const [isSearchResultsNull, setIsSearchResultsNull] = useState(false); const [isSearchResultsNull, setIsSearchResultsNull] = useState(false);
const intl = useIntl(); const intl = useIntl();
@ -42,7 +43,7 @@ const MapSearch = ({goToPlace}:IMapSearch) => {
*/ */
const {width, height} = useWindowSize(); const {width, height} = useWindowSize();
const [placeholderText, setPlaceholderText]= useState(EXPLORE_COPY.MAP.SEARCH_PLACEHOLDER); const [placeholderText, setPlaceholderText]= useState(EXPLORE_COPY.MAP.SEARCH_PLACEHOLDER);
const [tractSearch, setTractSearch] = useState<JsSearch | null>(null); const [tractSearch, setTractSearch] = useState<JsSearch.Search | null>(null);
/** /**
* Gets the tract search data and loads in the state. * Gets the tract search data and loads in the state.
@ -80,76 +81,93 @@ const MapSearch = ({goToPlace}:IMapSearch) => {
/** /**
* Searchs for a given Census tract ID. * Searchs for a given Census tract ID.
* @param {string} tract the 11 digit tract ID as a string * @param {string} tract the 11 digit tract ID as a string
* @return {Array} an array of one search result, or null if no result found
*/ */
const searchForTract = (tract: string): [ISearchResult] | [] => { const searchForTract = async (tract: string) => {
// We create a bounding box just to get the tract in the view box. // We create a bounding box just to get the tract in the view box.
// The size is not important. // The size is not important.
const BOUNDING_BOX_SIZE_DD = 0.2; const BOUNDING_BOX_SIZE_DD = 0.1;
/**
* Wait for the map to be done loading and moving.
* @param {function()} callback the callback to run after the map is ready
*/
const waitforMap = (callback: () => void): void => {
const isMapReady = !!mapRef.current &&
mapRef.current.getMap().isStyleLoaded() &&
mapRef.current.getMap().isSourceLoaded(constants.HIGH_ZOOM_SOURCE_NAME);
if (isMapReady) {
callback();
} else {
setTimeout(() => waitforMap(callback), 200);
}
};
// Convert 10 digit tracts to 11.
const searchTerm = tract.length == 10 ? '0' + tract : tract;
// If the search is for the same tract then do nothing.
if (selectedFeatureId == searchTerm) return;
setIsSearchResultsNull(true);
if (tractSearch) { if (tractSearch) {
// Convert 10 digit tracts to 11.
const searchTerm = tract.length == 10 ? '0' + tract : tract;
const result = tractSearch.search(searchTerm); const result = tractSearch.search(searchTerm);
if (result.length > 0) { if (result.length > 0) {
const lat = Number(result[0].INTPTLAT10); const searchTractRecord = result[0] as ISearchTractRecord;
const lon = Number(result[0].INTPTLON10); const lat = Number(searchTractRecord.INTPTLAT10);
return [{ const lon = Number(searchTractRecord.INTPTLON10);
addresstype: 'tract', const boundingBox = [
boundingbox: [ (lat - (BOUNDING_BOX_SIZE_DD / 2)).toString(),
(lat - (BOUNDING_BOX_SIZE_DD / 2)).toString(), (lat + (BOUNDING_BOX_SIZE_DD / 2)).toString(),
(lat + (BOUNDING_BOX_SIZE_DD / 2)).toString(), (lon - (BOUNDING_BOX_SIZE_DD / 2)).toString(),
(lon - (BOUNDING_BOX_SIZE_DD / 2)).toString(), (lon + (BOUNDING_BOX_SIZE_DD / 2)).toString(),
(lon + (BOUNDING_BOX_SIZE_DD / 2)).toString(), ];
], const [latMin, latMax, longMin, longMax] = boundingBox;
lat: result[0].INTPTLAT10, setIsSearchResultsNull(false);
lon: result[0].INTPTLON10,
type: 'tract', // Now move the map and select the tract.
place_rank: 1, goToPlace([[Number(longMin), Number(latMin)], [Number(longMax), Number(latMax)]]);
}]; waitforMap(() => {
// Set up a one-shot event handler to fire when the flyTo arrives at its destination. Once the
// tract is in view of the map, mapRef.current will always be valid here.
mapRef.current?.getMap().once('idle', () => {
const geoidSearchResults = mapRef.current?.getMap().querySourceFeatures(constants.HIGH_ZOOM_SOURCE_NAME, {
sourceLayer: constants.SCORE_SOURCE_LAYER,
validate: true,
filter: ['==', constants.GEOID_PROPERTY, searchTerm],
});
if (geoidSearchResults && geoidSearchResults.length > 0) {
selectFeatureOnMap(geoidSearchResults[0]);
}
});
});
} }
} }
return [];
}; };
/* /**
onSearchHandler will * Searchs for a given location such as address, zip, etc. This method will
1. extract the search term from the input field * will fetch data from the PSM API and return the results as JSON and
2. Determine if the search term is a Census Tract or not. * results to US only. If the data is valid, destructure the boundingBox
3. If it is a Census Tract, it will search the tract table for a bounding box. * values from the search results. Finally, is pans the map to the location.
4. If it is NOT a Census Tract, it will fetch data from the API and return the * @param {string} searchTerm the location to search for
results as JSON and results to US only. If data is valid, destructure the */
boundingBox values from the search results. const searchForLocation = async (searchTerm: string) => {
4. Pan the map to that location const searchResults = await fetch(
*/ `https://nominatim.openstreetmap.org/search?q=${searchTerm}&format=json&countrycodes=us`,
const onSearchHandler = async (event: React.FormEvent<HTMLFormElement>) => { {
event.preventDefault(); mode: 'cors',
event.stopPropagation(); })
.then((response) => {
const searchTerm = (event.currentTarget.elements.namedItem('search') as HTMLInputElement).value; if (!response.ok) {
let searchResults = null; throw new Error('Network response was not OK');
}
// If the search term a Census tract return response.json();
const isTract = /^\d{10,11}$/.test(searchTerm); })
if (isTract) { .catch((error) => {
setIsSearchResultsNull(false); console.error('There has been a problem with your fetch operation:', error);
searchResults = searchForTract(searchTerm); });
} else { console.log('Nominatum search results: ', searchResults);
searchResults = await fetch(
`https://nominatim.openstreetmap.org/search?q=${searchTerm}&format=json&countrycodes=us`,
{
mode: 'cors',
})
.then((response) => {
if (!response.ok) {
throw new Error('Network response was not OK');
}
return response.json();
})
.catch((error) => {
console.error('There has been a problem with your fetch operation:', error);
});
console.log('Nominatum search results: ', searchResults);
}
// If results are valid, set isSearchResultsNull to false and pan map to location: // If results are valid, set isSearchResultsNull to false and pan map to location:
if (searchResults && searchResults.length > 0) { if (searchResults && searchResults.length > 0) {
@ -161,6 +179,25 @@ const MapSearch = ({goToPlace}:IMapSearch) => {
} }
}; };
/**
Searches for a given search term upon clicking on the search button.
@param {React.FormEvent<HTMLFormElement>} event the click event
*/
const onSearchHandler = async (event: React.FormEvent<HTMLFormElement>) => {
event.preventDefault();
event.stopPropagation();
const searchTerm = (event.currentTarget.elements.namedItem('search') as HTMLInputElement).value;
// If the search term a Census tract
const isTract = /^\d{10,11}$/.test(searchTerm);
if (isTract) {
searchForTract(searchTerm);
} else {
searchForLocation(searchTerm);
}
};
return ( return (
<div className={styles.mapSearchContainer}> <div className={styles.mapSearchContainer}>
<MapSearchMessage isSearchResultsNull={isSearchResultsNull} /> <MapSearchMessage isSearchResultsNull={isSearchResultsNull} />
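The new searchForTract flow above pads 10-digit GEOIDs to 11 digits and builds a small bounding box around the tract's internal point before panning the map and querying the tile source. A minimal sketch of that bookkeeping; the record shape mirrors ISearchTractRecord and the 0.1 degree box size comes from the diff:

```typescript
// Sketch: normalize a tract ID and derive the bounds passed to goToPlace().
interface ISearchTractRecord {
  GEOID10: string;
  INTPTLAT10: string; // internal point latitude, as a string
  INTPTLON10: string; // internal point longitude, as a string
}

const BOUNDING_BOX_SIZE_DD = 0.1; // decimal degrees; only needs to bring the tract into view

function tractSearchBounds(tract: string, record: ISearchTractRecord) {
  // Census tract GEOIDs are 11 digits; restore a dropped leading zero.
  const searchTerm = tract.length === 10 ? "0" + tract : tract;
  const lat = Number(record.INTPTLAT10);
  const lon = Number(record.INTPTLON10);
  const half = BOUNDING_BOX_SIZE_DD / 2;
  // [[west, south], [east, north]], the LngLatBoundsLike shape goToPlace expects.
  const bounds: [[number, number], [number, number]] = [
    [lon - half, lat - half],
    [lon + half, lat + half],
  ];
  return {searchTerm, bounds};
}
```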


@ -1,6 +1,6 @@
import React, {useMemo} from 'react'; import React, {useMemo} from 'react';
import {Source, Layer} from 'react-map-gl'; import {Source, Layer} from 'react-map-gl';
import {AnyLayer} from 'mapbox-gl'; import {MapGeoJSONFeature} from 'maplibre-gl';
// Contexts: // Contexts:
import {useFlags} from '../../contexts/FlagContext'; import {useFlags} from '../../contexts/FlagContext';
@ -9,8 +9,8 @@ import * as constants from '../../data/constants';
import * as COMMON_COPY from '../../data/copy/common'; import * as COMMON_COPY from '../../data/copy/common';
interface IMapTractLayers { interface IMapTractLayers {
selectedFeatureId: AnyLayer, selectedFeatureId: string | number,
selectedFeature: AnyLayer, selectedFeature: MapGeoJSONFeature | undefined,
} }
/** /**
@ -60,8 +60,8 @@ export const featureURLForTilesetName = (tilesetName: string): string => {
* only the interactive layers are returned from this component. The reason being is that the * only the interactive layers are returned from this component. The reason being is that the
* other layers are supplied by he getOSBaseMap function. * other layers are supplied by he getOSBaseMap function.
* *
* @param {AnyLayer} selectedFeatureId * @param {string | number} selectedFeatureId
* @param {AnyLayer} selectedFeature * @param {MapGeoJSONFeature | undefined} selectedFeature
* @return {Style} * @return {Style}
*/ */
const MapTractLayers = ({ const MapTractLayers = ({
@ -70,9 +70,7 @@ const MapTractLayers = ({
}: IMapTractLayers) => { }: IMapTractLayers) => {
const filter = useMemo(() => ['in', constants.GEOID_PROPERTY, selectedFeatureId], [selectedFeature]); const filter = useMemo(() => ['in', constants.GEOID_PROPERTY, selectedFeatureId], [selectedFeature]);
return process.env.MAPBOX_STYLES_READ_TOKEN ? ( return (
// In this case the MapBox token is found and All source(s)/layer(s) are returned.
<> <>
<Source <Source
id={constants.LOW_ZOOM_SOURCE_NAME} id={constants.LOW_ZOOM_SOURCE_NAME}
@ -160,34 +158,6 @@ const MapTractLayers = ({
/> />
</Source> </Source>
</> </>
): (
/**
* In this case the MapBox token is NOT found and ONLY interactive source(s)/layer(s) are returned
* In this case, the other layers (non-interactive) are provided by getOSBaseMap
*/
<Source
id={constants.HIGH_ZOOM_SOURCE_NAME}
type="vector"
promoteId={constants.GEOID_PROPERTY}
tiles={[featureURLForTilesetName('high')]}
maxzoom={constants.GLOBAL_MAX_ZOOM_HIGH}
minzoom={constants.GLOBAL_MIN_ZOOM_HIGH}
>
{/* High zoom layer (dynamic) - border styling around the selected feature */}
<Layer
id={constants.SELECTED_FEATURE_BORDER_LAYER_ID}
source-layer={constants.SCORE_SOURCE_LAYER}
filter={filter} // This filter filters out all other features except the selected feature.
type='line'
paint={{
'line-color': constants.SELECTED_FEATURE_BORDER_COLOR,
'line-width': constants.SELECTED_FEATURE_BORDER_WIDTH,
}}
minzoom={constants.GLOBAL_MIN_ZOOM_HIGH}
/>
</Source>
); );
}; };


@ -1,24 +1,20 @@
import React, {useMemo} from 'react'; import React from 'react';
import {Source, Layer} from 'react-map-gl'; import {Source, Layer} from 'react-map-gl';
import {AnyLayer} from 'mapbox-gl';
import * as constants from '../../data/constants'; import * as constants from '../../data/constants';
interface IMapTribalLayers {
selectedFeatureId: AnyLayer,
selectedFeature: AnyLayer,
}
/** /**
* This function will determine the URL for the tribal tiles. * This function will determine the URL for the tribal tiles.
* @return {string} * @return {string}
*/ */
export const tribalURL = (): string => { export const tribalURL = (): string => {
const featureTileBaseURL = constants.TILE_BASE_URL;
const featureTilePath = constants.GATSBY_DATA_PIPELINE_TRIBAL_PATH;
const XYZ_SUFFIX = '{z}/{x}/{y}.pbf'; const XYZ_SUFFIX = '{z}/{x}/{y}.pbf';
return [ return [
process.env.GATSBY_CDN_TILES_BASE_URL, featureTileBaseURL,
process.env.GATSBY_DATA_PIPELINE_TRIBAL_PATH, featureTilePath,
process.env.GATSBY_MAP_TILES_PATH, process.env.GATSBY_MAP_TILES_PATH,
XYZ_SUFFIX, XYZ_SUFFIX,
].join('/'); ].join('/');
@ -34,17 +30,12 @@ export const tribalURL = (): string => {
* only the interactive layers are returned from this component. The reason being is that the * only the interactive layers are returned from this component. The reason being is that the
* other layers are supplied by he getOSBaseMap function. * other layers are supplied by he getOSBaseMap function.
* *
* @param {AnyLayer} selectedFeatureId * @param {string | number} selectedFeatureId
* @param {AnyLayer} selectedFeature * @param {MapGeoJSONFeature | undefined} selectedFeature
* @return {Style} * @return {Style}
*/ */
const MapTribalLayer = ({ const MapTribalLayer = () => {
selectedFeatureId, return (
selectedFeature,
}: IMapTribalLayers) => {
const tribalSelectionFilter = useMemo(() => ['in', constants.TRIBAL_ID, selectedFeatureId], [selectedFeature]);
return process.env.MAPBOX_STYLES_READ_TOKEN ? (
// In this case the MapBox token is found and ALL source(s)/layer(s) are returned. // In this case the MapBox token is found and ALL source(s)/layer(s) are returned.
<Source <Source
@ -52,8 +43,6 @@ const MapTribalLayer = ({
type="vector" type="vector"
promoteId={constants.TRIBAL_ID} promoteId={constants.TRIBAL_ID}
tiles={[tribalURL()]} tiles={[tribalURL()]}
minzoom={constants.TRIBAL_MIN_ZOOM}
maxzoom={constants.TRIBAL_MAX_ZOOM}
> >
{/* Tribal layer */} {/* Tribal layer */}
@ -82,19 +71,6 @@ const MapTribalLayer = ({
maxzoom={constants.TRIBAL_MAX_ZOOM} maxzoom={constants.TRIBAL_MAX_ZOOM}
/> />
{/* Tribal layer - border styling around the selected feature */}
<Layer
id={constants.SELECTED_TRIBAL_FEATURE_BORDER_LAYER_ID}
source-layer={constants.TRIBAL_SOURCE_LAYER}
filter={tribalSelectionFilter}
type='line'
paint={{
'line-color': constants.SELECTED_FEATURE_BORDER_COLOR,
'line-width': constants.SELECTED_FEATURE_BORDER_WIDTH,
}}
minzoom={constants.TRIBAL_MIN_ZOOM}
/>
{/* Alaska layer */} {/* Alaska layer */}
<Layer <Layer
id={constants.TRIBAL_ALASKA_POINTS_LAYER_ID} id={constants.TRIBAL_ALASKA_POINTS_LAYER_ID}
@ -103,40 +79,47 @@ const MapTribalLayer = ({
type='circle' type='circle'
paint={{ paint={{
'circle-radius': constants.TRIBAL_ALASKA_CIRCLE_RADIUS, 'circle-radius': constants.TRIBAL_ALASKA_CIRCLE_RADIUS,
'circle-color': constants.PRIORITIZED_FEATURE_FILL_COLOR, 'circle-color': constants.TRIBAL_ALASKA_CIRCLE_FILL_COLOR,
'circle-opacity': constants.TRIBAL_FEATURE_FILL_OPACITY,
'circle-stroke-color': constants.TRIBAL_BORDER_COLOR,
'circle-stroke-width': constants.ALAKSA_POINTS_STROKE_WIDTH,
'circle-stroke-opacity': constants.FEATURE_BORDER_OPACITY,
}}
minzoom={constants.ALASKA_MIN_ZOOM}
maxzoom={constants.ALASKA_MAX_ZOOM}
/>
{/* Tribal labels layer */}
<Layer
id={constants.TRIBAL_LABELS_LAYER_ID}
source-layer={constants.TRIBAL_SOURCE_LAYER}
type='symbol'
layout={{
'text-field': [
'case',
['in', ' LAR', ['get', constants.LAND_AREA_NAME]],
['slice', ['get', constants.LAND_AREA_NAME], 0, ['-', ['length', ['get', constants.LAND_AREA_NAME]], 4]],
['in', ' IRA', ['get', constants.LAND_AREA_NAME]],
['slice', ['get', constants.LAND_AREA_NAME], 0, ['-', ['length', ['get', constants.LAND_AREA_NAME]], 4]],
['in', ' TSA', ['get', constants.LAND_AREA_NAME]],
['slice', ['get', constants.LAND_AREA_NAME], 0, ['-', ['length', ['get', constants.LAND_AREA_NAME]], 4]],
['get', constants.LAND_AREA_NAME],
],
'text-anchor': 'top',
'text-offset': [0, 1],
'text-size': 12,
'text-allow-overlap': false,
'text-ignore-placement': false,
}}
paint={{
'text-color': '#333333',
'text-halo-color': '#FFFFFF',
'text-halo-width': 1.5,
}} }}
minzoom={constants.TRIBAL_MIN_ZOOM} minzoom={constants.TRIBAL_MIN_ZOOM}
maxzoom={constants.TRIBAL_MAX_ZOOM} maxzoom={constants.TRIBAL_MAX_ZOOM}
/> />
</Source> </Source>
) : (
/**
* In this case the MapBox token is NOT found and ONLY INTERACTIVE source(s)/layer(s) are returned.
* In this case, the other layers (non-interactive) are provided by getOSBaseMap
*/
<Source
id={constants.TRIBAL_SOURCE_NAME}
type="vector"
promoteId={constants.TRIBAL_ID}
tiles={[tribalURL()]}
minzoom={constants.TRIBAL_MIN_ZOOM}
maxzoom={constants.TRIBAL_MAX_ZOOM}
>
{/* Tribal layer - border styling around the selected feature */}
<Layer
id={constants.SELECTED_TRIBAL_FEATURE_BORDER_LAYER_ID}
source-layer={constants.TRIBAL_SOURCE_LAYER}
filter={tribalSelectionFilter}
type='line'
paint={{
'line-color': constants.SELECTED_FEATURE_BORDER_COLOR,
'line-width': constants.SELECTED_FEATURE_BORDER_WIDTH,
}}
minzoom={constants.TRIBAL_MIN_ZOOM}
/>
</Source>
); );
}; };
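The tribal labels layer above uses a MapLibre expression to drop a trailing ' LAR', ' IRA', or ' TSA' suffix from the land area name before drawing the label. A plain TypeScript sketch of the equivalent logic (the example name is illustrative):

```typescript
// Sketch: TypeScript equivalent of the 'text-field' expression above, which trims
// the last four characters when the name contains " LAR", " IRA", or " TSA".
function tribalLabelFor(landAreaName: string): string {
  const suffixes = [" LAR", " IRA", " TSA"];
  if (suffixes.some((suffix) => landAreaName.includes(suffix))) {
    return landAreaName.slice(0, landAreaName.length - 4);
  }
  return landAreaName;
}

// Illustrative example only: "Example Nation TSA" -> "Example Nation"
console.log(tribalLabelFor("Example Nation TSA"));
```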


@ -26,7 +26,7 @@ exports[`rendering of ReleaseUpdate Component checks if component renders 1`] =
> >
<div> <div>
<div> <div>
Version 1.0 Release update - Nov 22, 2022 Version 2.0 Release update - Nov 22, 2022
</div> </div>
<div> <div>
New & improved New & improved
@ -237,7 +237,7 @@ exports[`rendering of ReleaseUpdate Component checks if component renders 1`] =
</ul> </ul>
</div> </div>
<div> <div>
release version 1.0 release version 2.0
</div> </div>
</div> </div>
</section> </section>

client/src/constants.tsx (new file, 2 lines added)

@ -0,0 +1,2 @@
export const GITHUB_LINK = 'https://github.com/DOI-DO/ceq-j40-cejst-2';
export const GITHUB_LINK_ES = `${GITHUB_LINK}/blob/main/README-es.md`;


@ -261,10 +261,10 @@ export const LOW_ZOOM_LAYER_ID = "low-zoom-layer-id";
export const FEATURE_BORDER_LAYER_ID = "feature-border-layer-id"; export const FEATURE_BORDER_LAYER_ID = "feature-border-layer-id";
export const SELECTED_FEATURE_BORDER_LAYER_ID = export const SELECTED_FEATURE_BORDER_LAYER_ID =
"selected-feature-border-layer-id"; "selected-feature-border-layer-id";
export const TRIBAL_LAYER_ID = "tribal-layer-id"; export const TRIBAL_LAYER_ID = "tribal-layer-id";
export const SELECTED_TRIBAL_FEATURE_BORDER_LAYER_ID =
"selected-feature-tribal-border-layer-id";
export const TRIBAL_ALASKA_POINTS_LAYER_ID = "tribal-alaska-points-layer-id"; export const TRIBAL_ALASKA_POINTS_LAYER_ID = "tribal-alaska-points-layer-id";
export const TRIBAL_LABELS_LAYER_ID = "tribal-labels-layer-id";
// Used in layer filters: // Used in layer filters:
export const SCORE_PROPERTY_LOW = "SCORE"; export const SCORE_PROPERTY_LOW = "SCORE";
@ -282,8 +282,10 @@ export const GLOBAL_MAX_ZOOM_HIGH = 11;
export const GLOBAL_MIN_ZOOM_FEATURE_BORDER = 5; export const GLOBAL_MIN_ZOOM_FEATURE_BORDER = 5;
export const GLOBAL_MAX_ZOOM_FEATURE_BORDER = 22; export const GLOBAL_MAX_ZOOM_FEATURE_BORDER = 22;
export const TRIBAL_MIN_ZOOM = 3; export const TRIBAL_MIN_ZOOM = 6.6;
export const TRIBAL_MAX_ZOOM = 22; export const TRIBAL_MAX_ZOOM = 22;
export const ALASKA_MIN_ZOOM = 3;
export const ALASKA_MAX_ZOOM = 22;
// Opacity // Opacity
export const FEATURE_BORDER_OPACITY = 0.5; export const FEATURE_BORDER_OPACITY = 0.5;
@ -297,7 +299,7 @@ export const FEATURE_BORDER_COLOR = "#4EA5CF";
export const SELECTED_FEATURE_BORDER_COLOR = "#1A4480"; export const SELECTED_FEATURE_BORDER_COLOR = "#1A4480";
export const PRIORITIZED_FEATURE_FILL_COLOR = "#768FB3"; export const PRIORITIZED_FEATURE_FILL_COLOR = "#768FB3";
export const TRIBAL_BORDER_COLOR = "##4EA5CF"; export const TRIBAL_BORDER_COLOR = "#4EA5CF";
export const SELECTED_TRIBAL_BORDER_COLOR = "#1A4480"; export const SELECTED_TRIBAL_BORDER_COLOR = "#1A4480";
export const TRIBAL_FILL_COLOR = "#768FB3"; export const TRIBAL_FILL_COLOR = "#768FB3";
export const TRIBAL_ALASKA_CIRCLE_FILL_COLOR = "#768FB3"; export const TRIBAL_ALASKA_CIRCLE_FILL_COLOR = "#768FB3";
@ -395,3 +397,7 @@ process.env.GATSBY_2_0_SCORE_PATH;
export const MAP_TRACT_SEARCH_PATH = process.env.DATA_SOURCE === "local" ? export const MAP_TRACT_SEARCH_PATH = process.env.DATA_SOURCE === "local" ?
process.env.GATSBY_DATA_PIPELINE_SEARCH_PATH_LOCAL : process.env.GATSBY_DATA_PIPELINE_SEARCH_PATH_LOCAL :
process.env.GATSBY_2_0_MAP_TRACT_SEARCH_PATH; process.env.GATSBY_2_0_MAP_TRACT_SEARCH_PATH;
export const GATSBY_DATA_PIPELINE_TRIBAL_PATH = process.env.DATA_SOURCE === "local" ?
process.env.GATSBY_DATA_PIPELINE_TRIBAL_PATH_LOCAL :
process.env.GATSBY_2_0_TRIBAL_PATH;


@ -23,9 +23,6 @@ export const USE_MAP_TUTORIAL_LINK_ES = process.env.GATSBY_CDN_TILES_BASE_URL +`
export const USE_DATA_TUTORIAL_LINK = process.env.GATSBY_CDN_TILES_BASE_URL +`/data-versions/2.0/data/score/downloadable/Using-the-CEJST-Spreadsheet-Tutorial.pdf`; export const USE_DATA_TUTORIAL_LINK = process.env.GATSBY_CDN_TILES_BASE_URL +`/data-versions/2.0/data/score/downloadable/Using-the-CEJST-Spreadsheet-Tutorial.pdf`;
export const USE_DATA_TUTORIAL_LINK_ES = process.env.GATSBY_CDN_TILES_BASE_URL +`/data-versions/2.0/data/score/downloadable/Using-the-CEJST-Spreadsheet-Tutorial-es.pdf`; export const USE_DATA_TUTORIAL_LINK_ES = process.env.GATSBY_CDN_TILES_BASE_URL +`/data-versions/2.0/data/score/downloadable/Using-the-CEJST-Spreadsheet-Tutorial-es.pdf`;
export const GITHUB_LINK = 'https://github.com/usds/justice40-tool';
export const GITHUB_LINK_ES = 'https://github.com/usds/justice40-tool/blob/main/README-es.md';
export const PAGE = defineMessages({ export const PAGE = defineMessages({
TITLE: { TITLE: {
id: 'about.page.title.text', id: 'about.page.title.text',


@ -5,6 +5,7 @@ import {FormattedDate, FormattedMessage} from 'gatsby-plugin-intl';
import {defineMessages} from 'react-intl'; import {defineMessages} from 'react-intl';
import LinkTypeWrapper from '../../components/LinkTypeWrapper'; import LinkTypeWrapper from '../../components/LinkTypeWrapper';
import DownloadLink from '../../components/DownloadLink'; import DownloadLink from '../../components/DownloadLink';
import {GITHUB_LINK} from '../../constants';
export interface IDefineMessage { export interface IDefineMessage {
id: string, id: string,
@ -234,7 +235,7 @@ export const FOOTER = defineMessages({
}, },
GITHUB_LINK: { GITHUB_LINK: {
id: 'common.pages.footer.gatsby.link', id: 'common.pages.footer.gatsby.link',
defaultMessage: 'https://github.com/usds/justice40-tool', defaultMessage: GITHUB_LINK,
description: 'Navigate to the about page. This is Footer find contact link text', description: 'Navigate to the about page. This is Footer find contact link text',
}, },
CONTACT: { CONTACT: {


@ -4,7 +4,7 @@ import {defineMessages} from 'react-intl';
import {FormattedMessage} from 'gatsby-plugin-intl'; import {FormattedMessage} from 'gatsby-plugin-intl';
import {boldFn, linkFn, simpleLink} from './common'; import {boldFn, linkFn, simpleLink} from './common';
export const VERSION_NUMBER = (1.0).toFixed(1); export const VERSION_NUMBER = (2.0).toFixed(1);
export const PAGE = defineMessages({ export const PAGE = defineMessages({
TILE: { TILE: {

View file

@ -1,196 +0,0 @@
import {Style} from 'maplibre-gl';
import {featureURLForTilesetName} from '../components/MapTractLayers/MapTractLayers';
import * as constants from '../data/constants';
// *********** OPEN SOURCE BASE MAP CONSTANTS ***************
const imageSuffix = constants.isMobile ? '' : '@2x';
// Original "light" Base layer
// Additional layers found here: https://carto.com/help/building-maps/basemap-list/#carto-vector-basemaps
const cartoLightBaseLayer = {
noLabels: [
`https://a.basemaps.cartocdn.com/light_nolabels/{z}/{x}/{y}${imageSuffix}.png`,
`https://b.basemaps.cartocdn.com/light_nolabels/{z}/{x}/{y}${imageSuffix}.png`,
`https://c.basemaps.cartocdn.com/light_nolabels/{z}/{x}/{y}${imageSuffix}.png`,
`https://d.basemaps.cartocdn.com/light_nolabels/{z}/{x}/{y}${imageSuffix}.png`,
],
labelsOnly: [
`https://cartodb-basemaps-a.global.ssl.fastly.net/light_only_labels/{z}/{x}/{y}${imageSuffix}.png`,
`https://cartodb-basemaps-b.global.ssl.fastly.net/light_only_labels/{z}/{x}/{y}${imageSuffix}.png`,
`https://cartodb-basemaps-c.global.ssl.fastly.net/light_only_labels/{z}/{x}/{y}${imageSuffix}.png`,
`https://cartodb-basemaps-d.global.ssl.fastly.net/light_only_labels/{z}/{x}/{y}${imageSuffix}.png`,
],
};
// *********** OPEN SOURCE STATIC MAP STYLES ***************
/**
* This function will be called when there is no MapBox token found. This function will
* return the open source base map along with styles for the chosen source.
*
* This function returns a Style in accordance with the MapBox style JSON spec
* https://docs.mapbox.com/mapbox-gl-js/style-spec/
*
* @return {Style}
*/
export const getOSBaseMap = (): Style => {
return {
'version': 8,
/**
* Census Tract Source
* */
'sources': {
/**
* The base map source allows us to define where the tiles can be fetched from.
*/
[constants.BASE_MAP_SOURCE_NAME]: {
'type': 'raster',
'tiles': cartoLightBaseLayer.noLabels,
'minzoom': constants.GLOBAL_MIN_ZOOM,
'maxzoom': constants.GLOBAL_MAX_ZOOM,
},
// The High zoom source:
[constants.HIGH_ZOOM_SOURCE_NAME]: {
// It is only shown at high zoom levels to avoid performance issues at lower zooms
'type': 'vector',
// Our current tippecanoe command does not set an id.
// The below line promotes the GEOID10 property to the ID
'promoteId': constants.GEOID_PROPERTY,
'tiles': [featureURLForTilesetName('high')],
// Setting maxzoom here enables 'overzooming'
// e.g. continued zooming beyond the max bounds.
// More here: https://docs.mapbox.com/help/glossary/overzoom/
'minzoom': constants.GLOBAL_MIN_ZOOM_HIGH,
'maxzoom': constants.GLOBAL_MAX_ZOOM_HIGH,
},
// The Low zoom source:
[constants.LOW_ZOOM_SOURCE_NAME]: {
// "Score-low" represents a tileset at the level of bucketed tracts.
// Census block group information is `dissolve`d into tracts, then
// each tract is `dissolve`d into one of ten buckets. It is meant
// to give us a favorable tradeoff between performance and fidelity.
'type': 'vector',
'promoteId': constants.GEOID_PROPERTY,
'tiles': [featureURLForTilesetName('low')],
'minzoom': constants.GLOBAL_MIN_ZOOM_LOW,
'maxzoom': constants.GLOBAL_MAX_ZOOM_LOW,
},
// The labels source:
'labels': {
'type': 'raster',
'tiles': cartoLightBaseLayer.labelsOnly,
},
},
/**
* Each object in the layers array references its source via the source key.
*/
'layers': [
// The baseMapLayer
{
'id': constants.BASE_MAP_LAYER_ID,
'source': constants.BASE_MAP_SOURCE_NAME,
'type': 'raster',
'minzoom': constants.GLOBAL_MIN_ZOOM,
'maxzoom': constants.GLOBAL_MAX_ZOOM,
},
// A layer for labels only
{
'id': 'labels-only-layer',
'source': 'labels',
'type': 'raster',
'layout': {
'visibility': 'visible',
},
'minzoom': constants.GLOBAL_MIN_ZOOM,
'maxzoom': constants.GLOBAL_MAX_ZOOM,
},
// Low zoom layer (static) - prioritized features only
{
'id': constants.LOW_ZOOM_LAYER_ID,
'source': constants.LOW_ZOOM_SOURCE_NAME,
'source-layer': constants.SCORE_SOURCE_LAYER,
/**
* This shows features where the low score > score boundary threshold.
* In other words, this filters out non-prioritized features
*/
'filter': ['all',
['>', constants.SCORE_PROPERTY_LOW, constants.SCORE_BOUNDARY_THRESHOLD],
],
'type': 'fill',
'paint': {
'fill-color': constants.PRIORITIZED_FEATURE_FILL_COLOR,
'fill-opacity': constants.LOW_ZOOM_PRIORITIZED_FEATURE_FILL_OPACITY,
},
'minzoom': constants.GLOBAL_MIN_ZOOM_LOW,
'maxzoom': constants.GLOBAL_MAX_ZOOM_LOW,
},
// High zoom layer (static) - non-prioritized features only
{
'id': constants.HIGH_ZOOM_LAYER_ID,
'source': constants.HIGH_ZOOM_SOURCE_NAME,
'source-layer': constants.SCORE_SOURCE_LAYER,
/**
* The SCORE_PROPERTY_HIGH is a boolean value. True for
* prioritized and false for non-prioritized
*/
'filter': ['all',
['==', constants.SCORE_PROPERTY_HIGH, false],
],
'type': 'fill',
'paint': {
'fill-opacity': constants.NON_PRIORITIZED_FEATURE_FILL_OPACITY,
},
'minzoom': constants.GLOBAL_MIN_ZOOM_HIGH,
},
// High zoom layer (static) - prioritized features only
{
'id': constants.PRIORITIZED_HIGH_ZOOM_LAYER_ID,
'source': constants.HIGH_ZOOM_SOURCE_NAME,
'source-layer': constants.SCORE_SOURCE_LAYER,
/**
* The SCORE_PROPERTY_HIGH is a boolean value. True for
* prioritized and false for non-prioritized
*/
'filter': ['all',
['==', constants.SCORE_PROPERTY_HIGH, true],
],
'type': 'fill',
'paint': {
'fill-color': constants.PRIORITIZED_FEATURE_FILL_COLOR,
'fill-opacity': constants.HIGH_ZOOM_PRIORITIZED_FEATURE_FILL_OPACITY,
},
'minzoom': constants.GLOBAL_MIN_ZOOM_HIGH,
},
// High zoom layer (static) - controls the border between features
{
'id': constants.FEATURE_BORDER_LAYER_ID,
'source': constants.HIGH_ZOOM_SOURCE_NAME,
'source-layer': constants.SCORE_SOURCE_LAYER,
'type': 'line',
'paint': {
'line-color': constants.FEATURE_BORDER_COLOR,
'line-width': constants.FEATURE_BORDER_WIDTH,
'line-opacity': constants.FEATURE_BORDER_OPACITY,
},
'minzoom': constants.GLOBAL_MIN_ZOOM_FEATURE_BORDER,
'maxzoom': constants.GLOBAL_MAX_ZOOM_FEATURE_BORDER,
},
],
};
};

View file

@ -172,7 +172,6 @@
"description": "Navigate to the about page. This is Footer FOIA link text" "description": "Navigate to the about page. This is Footer FOIA link text"
}, },
"common.pages.footer.gatsby.link": { "common.pages.footer.gatsby.link": {
"defaultMessage": "https://github.com/usds/justice40-tool",
"description": "Navigate to the about page. This is Footer find contact link text" "description": "Navigate to the about page. This is Footer find contact link text"
}, },
"common.pages.footer.github.link.text": { "common.pages.footer.github.link.text": {

View file

@ -12,6 +12,7 @@ import DatasetsButton from '../components/DatasetsButton';
import SubPageNav from '../components/SubPageNav'; import SubPageNav from '../components/SubPageNav';
import * as ABOUT_COPY from '../data/copy/about'; import * as ABOUT_COPY from '../data/copy/about';
import {GITHUB_LINK, GITHUB_LINK_ES} from '../constants';
import {FEEDBACK_EMAIL} from '../data/copy/common'; import {FEEDBACK_EMAIL} from '../data/copy/common';
import {PAGES_ENDPOINTS, USWDS_BREAKPOINTS, DATA_SURVEY_LINKS} from '../data/constants'; import {PAGES_ENDPOINTS, USWDS_BREAKPOINTS, DATA_SURVEY_LINKS} from '../data/constants';
@ -164,7 +165,7 @@ const AboutPage = ({location}: IAboutPageProps) => {
imgSrc={githubIcon} imgSrc={githubIcon}
header={intl.formatMessage(ABOUT_COPY.GET_INVOLVED.JOIN_OSC_HEADING)} header={intl.formatMessage(ABOUT_COPY.GET_INVOLVED.JOIN_OSC_HEADING)}
linkText={intl.formatMessage(ABOUT_COPY.GET_INVOLVED.JOIN_OSC_LINK_TEXT)} linkText={intl.formatMessage(ABOUT_COPY.GET_INVOLVED.JOIN_OSC_LINK_TEXT)}
url={intl.locale === 'es' ? ABOUT_COPY.GITHUB_LINK_ES : ABOUT_COPY.GITHUB_LINK} url={intl.locale === 'es' ? GITHUB_LINK_ES : GITHUB_LINK}
openUrlNewTab={true} openUrlNewTab={true}
internal={false}> internal={false}>
<p> <p>

View file

@ -503,7 +503,7 @@ exports[`rendering of the DatasetContainer checks if various text fields are vis
</div> </div>
<p> <p>
CEQ will update the tool each year based on public feedback, research, and the availability of new data. The current version of the tool is version 1.0. CEQ will update the tool each year based on public feedback, research, and the availability of new data. The current version of the tool is version 2.0.
<a <a
class="usa-link usa-link--external" class="usa-link usa-link--external"
data-cy="" data-cy=""
@ -815,7 +815,7 @@ exports[`rendering of the DatasetContainer checks if various text fields are vis
<a <a
class="usa-link usa-link--external" class="usa-link usa-link--external"
data-cy="" data-cy=""
href="https://github.com/usds/justice40-tool" href="https://github.com/DOI-DO/ceq-j40-cejst-2"
rel="noreferrer" rel="noreferrer"
target="_blank" target="_blank"
> >
@ -1047,7 +1047,7 @@ exports[`rendering of the DatasetContainer checks if various text fields are vis
<a <a
class="usa-link usa-link--external footer-link-first-child" class="usa-link usa-link--external footer-link-first-child"
data-cy="check-out-the-code-on-github" data-cy="check-out-the-code-on-github"
href="https://github.com/usds/justice40-tool" href="https://github.com/DOI-DO/ceq-j40-cejst-2"
rel="noreferrer" rel="noreferrer"
target="_blank" target="_blank"
> >

View file

@ -626,7 +626,7 @@ exports[`rendering of the DatasetContainer checks if various text fields are vis
<a <a
class="usa-link usa-link--external footer-link-first-child" class="usa-link usa-link--external footer-link-first-child"
data-cy="check-out-the-code-on-github" data-cy="check-out-the-code-on-github"
href="https://github.com/usds/justice40-tool" href="https://github.com/DOI-DO/ceq-j40-cejst-2"
rel="noreferrer" rel="noreferrer"
target="_blank" target="_blank"
> >

View file

@ -431,7 +431,7 @@ exports[`rendering of the DatasetContainer checks if various text fields are vis
<h2 <h2
class="j40-mt-0 j40-mb-3" class="j40-mt-0 j40-mb-3"
> >
Version 1.0 file formats Version 2.0 file formats
</h2> </h2>
<section> <section>
<div> <div>
@ -458,7 +458,7 @@ exports[`rendering of the DatasetContainer checks if various text fields are vis
> >
<div> <div>
<div> <div>
Version 1.0 Release update - Nov 22, 2022 Version 2.0 Release update - Nov 22, 2022
</div> </div>
<div> <div>
New & improved New & improved
@ -669,14 +669,14 @@ exports[`rendering of the DatasetContainer checks if various text fields are vis
</ul> </ul>
</div> </div>
<div> <div>
release version 1.0 release version 2.0
</div> </div>
</div> </div>
</section> </section>
</div> </div>
</section> </section>
<p> <p>
The dataset used in the 1.0 version of the tool, along with a codebook, and information about how to use the list of communities (.pdf) are available for download: The dataset used in the 2.0 version of the tool, along with a codebook, and information about how to use the list of communities (.pdf) are available for download:
</p> </p>
<p> <p>
@ -926,7 +926,7 @@ exports[`rendering of the DatasetContainer checks if various text fields are vis
<a <a
class="usa-link usa-link--external footer-link-first-child" class="usa-link usa-link--external footer-link-first-child"
data-cy="check-out-the-code-on-github" data-cy="check-out-the-code-on-github"
href="https://github.com/usds/justice40-tool" href="https://github.com/DOI-DO/ceq-j40-cejst-2"
rel="noreferrer" rel="noreferrer"
target="_blank" target="_blank"
> >

View file

@ -1460,7 +1460,7 @@ exports[`rendering of the DatasetContainer checks if various text fields are vis
<a <a
class="usa-link usa-link--external footer-link-first-child" class="usa-link usa-link--external footer-link-first-child"
data-cy="check-out-the-code-on-github" data-cy="check-out-the-code-on-github"
href="https://github.com/usds/justice40-tool" href="https://github.com/DOI-DO/ceq-j40-cejst-2"
rel="noreferrer" rel="noreferrer"
target="_blank" target="_blank"
> >

View file

@ -1040,7 +1040,7 @@ exports[`rendering of the DatasetContainer checks if various text fields are vis
data-testid="grid" data-testid="grid"
> >
<h2> <h2>
Datasets used in v1.0 methodology Datasets used in v2.0 methodology
</h2> </h2>
</div> </div>
</div> </div>
@ -3307,7 +3307,7 @@ exports[`rendering of the DatasetContainer checks if various text fields are vis
<a <a
class="usa-link usa-link--external footer-link-first-child" class="usa-link usa-link--external footer-link-first-child"
data-cy="check-out-the-code-on-github" data-cy="check-out-the-code-on-github"
href="https://github.com/usds/justice40-tool" href="https://github.com/DOI-DO/ceq-j40-cejst-2"
rel="noreferrer" rel="noreferrer"
target="_blank" target="_blank"
> >

View file

@ -486,7 +486,7 @@ exports[`rendering of the DatasetContainer checks if various text fields are vis
data-testid="grid" data-testid="grid"
> >
<h2> <h2>
Upcoming events for CEJST version 1.0 Upcoming events for CEJST version 2.0
</h2> </h2>
</div> </div>
<div <div
@ -1367,7 +1367,7 @@ exports[`rendering of the DatasetContainer checks if various text fields are vis
<a <a
class="usa-link usa-link--external footer-link-first-child" class="usa-link usa-link--external footer-link-first-child"
data-cy="check-out-the-code-on-github" data-cy="check-out-the-code-on-github"
href="https://github.com/usds/justice40-tool" href="https://github.com/DOI-DO/ceq-j40-cejst-2"
rel="noreferrer" rel="noreferrer"
target="_blank" target="_blank"
> >

View file

@ -566,7 +566,7 @@ exports[`rendering of the DatasetContainer checks if various text fields are vis
<a <a
class="usa-link usa-link--external footer-link-first-child" class="usa-link usa-link--external footer-link-first-child"
data-cy="check-out-the-code-on-github" data-cy="check-out-the-code-on-github"
href="https://github.com/usds/justice40-tool" href="https://github.com/DOI-DO/ceq-j40-cejst-2"
rel="noreferrer" rel="noreferrer"
target="_blank" target="_blank"
> >

View file

@ -1,6 +1,6 @@
// other CSS libraries: // other CSS libraries:
@import "~@trussworks/react-uswds/lib/index.css"; @import "~@trussworks/react-uswds/lib/index.css";
@import "../../node_modules/mapbox-gl/dist/mapbox-gl.css"; @import "../../node_modules/maplibre-gl/dist/maplibre-gl.css";
/* /*
According to the fundamental usage of USWDS: According to the fundamental usage of USWDS:
@ -537,4 +537,4 @@ button.usa-accordion__button[aria-expanded=true]:has(div[class*="disCategoryCont
.faqs-dot-alignment{ .faqs-dot-alignment{
align-self: flex-start; align-self: flex-start;
padding-top: 5px; padding-top: 5px;
} }

View file

@ -439,7 +439,6 @@ def full_run(ctx, use_cache):
if not use_cache: if not use_cache:
ctx.invoke(data_cleanup) ctx.invoke(data_cleanup)
ctx.invoke(census_data_download, zip_compress=False, use_cache=use_cache) ctx.invoke(census_data_download, zip_compress=False, use_cache=use_cache)
ctx.invoke(extract_data_sources, dataset=None, use_cache=use_cache)
ctx.invoke(etl_run, dataset=None, use_cache=use_cache) ctx.invoke(etl_run, dataset=None, use_cache=use_cache)
ctx.invoke(full_post_etl) ctx.invoke(full_post_etl)

View file

@ -160,13 +160,15 @@ DATASET_LIST = [
"name": "census_acs", "name": "census_acs",
"module_dir": "census_acs", "module_dir": "census_acs",
"class_name": "CensusACSETL", "class_name": "CensusACSETL",
"is_memory_intensive": False, # Not memory intensive, but run at the end
"is_memory_intensive": True,
}, },
{ {
"name": "census_acs_2010", "name": "census_acs_2010",
"module_dir": "census_acs_2010", "module_dir": "census_acs_2010",
"class_name": "CensusACS2010ETL", "class_name": "CensusACS2010ETL",
"is_memory_intensive": False, # Not memory intensive, but run at the end
"is_memory_intensive": True,
}, },
{ {
"name": "us_army_fuds", "name": "us_army_fuds",

View file

@ -56,14 +56,33 @@ class CensusACSETL(ExtractTransformLoad):
self.MEDIAN_INCOME_FIELD_NAME = ( self.MEDIAN_INCOME_FIELD_NAME = (
"Median household income in the past 12 months" "Median household income in the past 12 months"
) )
self.POVERTY_DATASET_TOTAL = "C17002_001E" # Estimate!!Total,
self.POVERTY_UNDER_50PCT = "C17002_002E" # Estimate!!Total!!Under .50
self.POVERTY_50PCT_TO_99PCT = (
"C17002_003E" # Estimate!!Total!!.50 to .99
)
self.POVERTY_100PCT_TO_124PCT = (
"C17002_004E" # Estimate!!Total!!1.00 to 1.24
)
self.POVERTY_125PCT_TO_149PCT = (
"C17002_005E" # Estimate!!Total!!1.25 to 1.49
)
self.POVERTY_150PCT_TO_184PCT = (
"C17002_006E" # Estimate!!Total!!1.50 to 1.84
)
self.POVERTY_185PCT_TO_199PCT = (
"C17002_007E" # Estimate!!Total!!1.85 to 1.99
)
self.POVERTY_FIELDS = [ self.POVERTY_FIELDS = [
"C17002_001E", # Estimate!!Total, self.POVERTY_DATASET_TOTAL,
"C17002_002E", # Estimate!!Total!!Under .50 self.POVERTY_UNDER_50PCT,
"C17002_003E", # Estimate!!Total!!.50 to .99 self.POVERTY_50PCT_TO_99PCT,
"C17002_004E", # Estimate!!Total!!1.00 to 1.24 self.POVERTY_100PCT_TO_124PCT,
"C17002_005E", # Estimate!!Total!!1.25 to 1.49 self.POVERTY_125PCT_TO_149PCT,
"C17002_006E", # Estimate!!Total!!1.50 to 1.84 self.POVERTY_150PCT_TO_184PCT,
"C17002_007E", # Estimate!!Total!!1.85 to 1.99 self.POVERTY_185PCT_TO_199PCT,
] ]
self.POVERTY_LESS_THAN_100_PERCENT_FPL_FIELD_NAME = ( self.POVERTY_LESS_THAN_100_PERCENT_FPL_FIELD_NAME = (
@ -75,19 +94,30 @@ class CensusACSETL(ExtractTransformLoad):
self.POVERTY_LESS_THAN_200_PERCENT_FPL_FIELD_NAME = ( self.POVERTY_LESS_THAN_200_PERCENT_FPL_FIELD_NAME = (
"Percent of individuals < 200% Federal Poverty Line" "Percent of individuals < 200% Federal Poverty Line"
) )
self.IMPUTED_POVERTY_LESS_THAN_200_PERCENT_FPL_FIELD_NAME = ( self.POVERTY_LESS_THAN_200_PERCENT_FPL_COUNT_FIELD_NAME = (
"Percent of individuals < 200% Federal Poverty Line, imputed" "Total population of individuals < 200% Federal Poverty Line"
)
self.IMPUTED_POVERTY_LESS_THAN_200_PERCENT_FPL_FIELD_NAME = (
"Percent of individuals < 200% Federal Poverty Line," + " imputed"
)
self.IMPUTED_POVERTY_LESS_THAN_200_PERCENT_FPL_COUNT_FIELD_NAME = (
"Total population of individuals < 200% Federal Poverty Line,"
+ " imputed"
)
self.POVERTY_LESS_THAN_100_PERCENT_FPL_COUNT_FIELD_NAME = (
"Total population of individuals < 100% Federal Poverty Line"
)
self.IMPUTED_POVERTY_LESS_THAN_100_PERCENT_FPL_COUNT_FIELD_NAME = (
"Total population of individuals < 100% Federal Poverty Line,"
+ " imputed"
) )
self.ADJUSTED_POVERTY_LESS_THAN_200_PERCENT_FPL_FIELD_NAME = ( self.ADJUSTED_POVERTY_LESS_THAN_200_PERCENT_FPL_FIELD_NAME = (
"Adjusted percent of individuals < 200% Federal Poverty Line" "Adjusted percent of individuals < 200% Federal Poverty Line"
) )
self.ADJUSTED_AND_IMPUTED_POVERTY_LESS_THAN_200_PERCENT_FPL_FIELD_NAME_PRELIMINARY = ( self.ADJUSTED_AND_IMPUTED_POVERTY_LESS_THAN_200_PERCENT_FPL_FIELD_NAME_PRELIMINARY = (
"Preliminary adjusted percent of individuals < 200% Federal Poverty Line," "Preliminary adjusted percent of individuals < 200% Federal Poverty Line,"
+ " imputed" + " imputed"
) )
self.ADJUSTED_AND_IMPUTED_POVERTY_LESS_THAN_200_PERCENT_FPL_FIELD_NAME = ( self.ADJUSTED_AND_IMPUTED_POVERTY_LESS_THAN_200_PERCENT_FPL_FIELD_NAME = (
"Adjusted percent of individuals < 200% Federal Poverty Line," "Adjusted percent of individuals < 200% Federal Poverty Line,"
+ " imputed" + " imputed"
@ -148,32 +178,102 @@ class CensusACSETL(ExtractTransformLoad):
) )
self.HIGH_SCHOOL_ED_FIELD = "Percent individuals age 25 or over with less than high school degree" self.HIGH_SCHOOL_ED_FIELD = "Percent individuals age 25 or over with less than high school degree"
# College attendance fields ## Off-Campus University Student Poverty Fields
self.COLLEGE_ATTENDANCE_TOTAL_POPULATION_ASKED = ( # Estimate!!Total:!!Income in the past 12 months below the poverty level:!!
"B14004_001E" # Estimate!!Total # Enrolled in school:!!Enrolled in college undergraduate years
) self.OFFCAMPUS_UNIVERSITY_BELOW_POVERTY_UNDERGRADUATE = "B14006_009E"
self.COLLEGE_ATTENDANCE_MALE_ENROLLED_PUBLIC = "B14004_003E" # Estimate!!Total!!Male!!Enrolled in public college or graduate school # Estimate!!Total:!!Income in the past 12 months below the poverty level:!!
self.COLLEGE_ATTENDANCE_MALE_ENROLLED_PRIVATE = "B14004_008E" # Estimate!!Total!!Male!!Enrolled in private college or graduate school # Enrolled in school:!!Enrolled in graduate or professional school
self.COLLEGE_ATTENDANCE_FEMALE_ENROLLED_PUBLIC = "B14004_019E" # Estimate!!Total!!Female!!Enrolled in public college or graduate school self.OFFCAMPUS_UNIVERSITY_BELOW_POVERTY_GRADUATE = "B14006_010E"
self.COLLEGE_ATTENDANCE_FEMALE_ENROLLED_PRIVATE = "B14004_024E" # Estimate!!Total!!Female!!Enrolled in private college or graduate school # Estimate!!Total:!!Income in the past 12 months at or above the poverty level:!!
# Enrolled in school:!!Enrolled in college undergraduate years
self.OFFCAMPUS_UNIVERSITY_ABOVE_POVERTY_UNDERGRADUATE = "B14006_019E"
# Estimate!!Total:!!Income in the past 12 months at or above the poverty level:!!
# Enrolled in school:!!Enrolled in graduate or professional school
self.OFFCAMPUS_UNIVERSITY_ABOVE_POVERTY_GRADUATE = "B14006_020E"
self.COLLEGE_ATTENDANCE_FIELDS = [ self.UNIVERSITY_POVERTY_FIELDS = [
self.COLLEGE_ATTENDANCE_TOTAL_POPULATION_ASKED, self.OFFCAMPUS_UNIVERSITY_BELOW_POVERTY_UNDERGRADUATE,
self.COLLEGE_ATTENDANCE_MALE_ENROLLED_PUBLIC, self.OFFCAMPUS_UNIVERSITY_BELOW_POVERTY_GRADUATE,
self.COLLEGE_ATTENDANCE_MALE_ENROLLED_PRIVATE, self.OFFCAMPUS_UNIVERSITY_ABOVE_POVERTY_UNDERGRADUATE,
self.COLLEGE_ATTENDANCE_FEMALE_ENROLLED_PUBLIC, self.OFFCAMPUS_UNIVERSITY_ABOVE_POVERTY_GRADUATE,
self.COLLEGE_ATTENDANCE_FEMALE_ENROLLED_PRIVATE,
] ]
self.COLLEGE_ATTENDANCE_FIELD = ( self.OFFCAMPUS_UNDERGRADUATE_POVERTY_FIELD = (
"Population below poverty line enrolled in an undergraduate program"
+ " (excluding students living in university housing)"
)
self.IMPUTED_OFFCAMPUS_UNDERGRADUATE_POVERTY_FIELD = (
"Population below poverty line enrolled in an undergraduate program"
+ " (excluding students living in university housing), imputed"
)
self.OFFCAMPUS_UNDERGRADUATE_FIELD = (
"Population enrolled in an undergraduate program"
+ " (excluding students living in university housing)"
)
self.IMPUTED_OFFCAMPUS_UNDERGRADUATE_FIELD = (
"Population enrolled in an undergraduate program"
+ " (excluding students living in university housing), imputed"
)
self.OFFCAMPUS_UNIVERSITY_POVERTY_FIELD = (
"Population below poverty line enrolled in an undergraduate, graduate, or professional program"
+ " (excluding students living in university housing)"
)
self.IMPUTED_OFFCAMPUS_UNIVERSITY_POVERTY_FIELD = (
"Population below poverty line enrolled in an undergraduate, graduate, or professional program"
+ " (excluding students living in university housing), imputed"
)
self.OFFCAMPUS_UNIVERSITY_FIELD = (
"Population enrolled in an undergraduate, graduate, or professional program"
+ " (excluding students living in university housing)"
)
self.IMPUTED_OFFCAMPUS_UNIVERSITY_FIELD = (
"Population enrolled in an undergraduate, graduate, or professional program"
+ " (excluding students living in university housing), imputed"
)
self.IMPUTED_POVERTY_DATASET_TOTAL = (
"Total population in poverty dataset (all income levels)"
+ ", imputed"
)
self.OVERALL_RATIO_200FPL_TO_100FPL = (
"Ratio <200% FPL to <100% FPL, overall"
)
self.OFFCAMPUS_UNIVERSITY_POPULATION_COUNT_UNDER_200PCT_FPL = "Estimated population count of off-campus university students <200% FPL"
self.POPULATION_COUNT_UNDER_200PCT_FPL_MINUS_OFFCAMPUS_UNIVERSITY_ESTIMATE = (
"Estimated population count of people in a househould with income <200% FPL"
+ ", excluding all university students"
)
self.POPULATION_TOTAL_IN_POVERTY_DATASET_MINUS_OFFCAMPUS_UNVERSITY = (
"Everyone in poverty dataset"
+ ", minus all off-campus university students"
)
# University Enrollment Rates (15+ population, includes students in dorms)
self.UNIVERSITY_ATTENDANCE_TOTAL_POPULATION_ASKED = (
"B14004_001E" # Estimate!!Total
)
self.UNIVERSITY_ATTENDANCE_MALE_ENROLLED_PUBLIC = "B14004_003E" # Estimate!!Total!!Male!!Enrolled in public college or graduate school
self.UNIVERSITY_ATTENDANCE_MALE_ENROLLED_PRIVATE = "B14004_008E" # Estimate!!Total!!Male!!Enrolled in private college or graduate school
self.UNIVERSITY_ATTENDANCE_FEMALE_ENROLLED_PUBLIC = "B14004_019E" # Estimate!!Total!!Female!!Enrolled in public college or graduate school
self.UNIVERSITY_ATTENDANCE_FEMALE_ENROLLED_PRIVATE = "B14004_024E" # Estimate!!Total!!Female!!Enrolled in private college or graduate school
self.UNIVERSITY_ATTENDANCE_FIELDS = [
self.UNIVERSITY_ATTENDANCE_TOTAL_POPULATION_ASKED,
self.UNIVERSITY_ATTENDANCE_MALE_ENROLLED_PUBLIC,
self.UNIVERSITY_ATTENDANCE_MALE_ENROLLED_PRIVATE,
self.UNIVERSITY_ATTENDANCE_FEMALE_ENROLLED_PUBLIC,
self.UNIVERSITY_ATTENDANCE_FEMALE_ENROLLED_PRIVATE,
]
self.UNIVERSITY_ATTENDANCE_FIELD = (
"Percent enrollment in college or graduate school" "Percent enrollment in college or graduate school"
) )
self.IMPUTED_COLLEGE_ATTENDANCE_FIELD = ( self.IMPUTED_UNIVERSITY_ATTENDANCE_FIELD = (
"Percent enrollment in college or graduate school, imputed" "Percent enrollment in college or graduate school, imputed"
) )
self.COLLEGE_NON_ATTENDANCE_FIELD = "Percent of population not currently enrolled in college or graduate school" self.UNIVERSITY_NON_ATTENDANCE_FIELD = "Percent of population not currently enrolled in college or graduate school"
self.RE_FIELDS = [ self.RE_FIELDS = [
"B02001_001E", "B02001_001E",
@ -295,11 +395,29 @@ class CensusACSETL(ExtractTransformLoad):
self.POVERTY_LESS_THAN_100_PERCENT_FPL_FIELD_NAME, self.POVERTY_LESS_THAN_100_PERCENT_FPL_FIELD_NAME,
self.POVERTY_LESS_THAN_150_PERCENT_FPL_FIELD_NAME, self.POVERTY_LESS_THAN_150_PERCENT_FPL_FIELD_NAME,
self.IMPUTED_POVERTY_LESS_THAN_200_PERCENT_FPL_FIELD_NAME, self.IMPUTED_POVERTY_LESS_THAN_200_PERCENT_FPL_FIELD_NAME,
self.POVERTY_LESS_THAN_200_PERCENT_FPL_COUNT_FIELD_NAME,
self.IMPUTED_POVERTY_LESS_THAN_200_PERCENT_FPL_COUNT_FIELD_NAME,
self.MEDIAN_HOUSE_VALUE_FIELD_NAME, self.MEDIAN_HOUSE_VALUE_FIELD_NAME,
self.HIGH_SCHOOL_ED_FIELD, self.HIGH_SCHOOL_ED_FIELD,
self.COLLEGE_ATTENDANCE_FIELD, self.OFFCAMPUS_UNIVERSITY_BELOW_POVERTY_UNDERGRADUATE,
self.COLLEGE_NON_ATTENDANCE_FIELD, self.OFFCAMPUS_UNIVERSITY_BELOW_POVERTY_GRADUATE,
self.IMPUTED_COLLEGE_ATTENDANCE_FIELD, self.OFFCAMPUS_UNIVERSITY_ABOVE_POVERTY_UNDERGRADUATE,
self.OFFCAMPUS_UNIVERSITY_ABOVE_POVERTY_GRADUATE,
self.OVERALL_RATIO_200FPL_TO_100FPL,
self.OFFCAMPUS_UNIVERSITY_POPULATION_COUNT_UNDER_200PCT_FPL,
self.POPULATION_COUNT_UNDER_200PCT_FPL_MINUS_OFFCAMPUS_UNIVERSITY_ESTIMATE,
self.POPULATION_TOTAL_IN_POVERTY_DATASET_MINUS_OFFCAMPUS_UNVERSITY,
self.UNIVERSITY_ATTENDANCE_FIELD,
self.UNIVERSITY_NON_ATTENDANCE_FIELD,
self.IMPUTED_UNIVERSITY_ATTENDANCE_FIELD,
self.OFFCAMPUS_UNIVERSITY_FIELD,
self.IMPUTED_OFFCAMPUS_UNIVERSITY_FIELD,
self.POVERTY_LESS_THAN_100_PERCENT_FPL_COUNT_FIELD_NAME,
self.IMPUTED_POVERTY_LESS_THAN_100_PERCENT_FPL_COUNT_FIELD_NAME,
self.OFFCAMPUS_UNIVERSITY_POVERTY_FIELD,
self.IMPUTED_OFFCAMPUS_UNIVERSITY_POVERTY_FIELD,
self.POVERTY_DATASET_TOTAL,
self.IMPUTED_POVERTY_DATASET_TOTAL,
field_names.IMPUTED_INCOME_FLAG_FIELD_NAME, field_names.IMPUTED_INCOME_FLAG_FIELD_NAME,
] ]
+ self.RE_OUTPUT_FIELDS + self.RE_OUTPUT_FIELDS
@ -315,6 +433,7 @@ class CensusACSETL(ExtractTransformLoad):
) )
self.df: pd.DataFrame self.df: pd.DataFrame
self.geo_df: gpd.GeoDataFrame
def get_data_sources(self) -> [DataSource]: def get_data_sources(self) -> [DataSource]:
# Define the variables to retrieve # Define the variables to retrieve
@ -328,7 +447,8 @@ class CensusACSETL(ExtractTransformLoad):
+ self.POVERTY_FIELDS + self.POVERTY_FIELDS
+ self.EDUCATIONAL_FIELDS + self.EDUCATIONAL_FIELDS
+ self.RE_FIELDS + self.RE_FIELDS
+ self.COLLEGE_ATTENDANCE_FIELDS + self.UNIVERSITY_POVERTY_FIELDS
+ self.UNIVERSITY_ATTENDANCE_FIELDS
+ self.AGE_INPUT_FIELDS + self.AGE_INPUT_FIELDS
) )
@ -383,11 +503,7 @@ class CensusACSETL(ExtractTransformLoad):
dtype={field_names.GEOID_TRACT_FIELD: "string"}, dtype={field_names.GEOID_TRACT_FIELD: "string"},
) )
def transform(self) -> None: # Load the census GeoJSON. First this looks locally; if there's no local
df = self.df
# Here we join the geometry of the US to the dataframe so that we can impute
# The income of neighbors. first this looks locally; if there's no local
# geojson file for all of the US, this will read it off of S3 # geojson file for all of the US, this will read it off of S3
logger.debug("Reading in geojson for the country") logger.debug("Reading in geojson for the country")
if not os.path.exists( if not os.path.exists(
@ -400,13 +516,18 @@ class CensusACSETL(ExtractTransformLoad):
self.DATA_PATH, self.DATA_PATH,
) )
geo_df = gpd.read_file( self.geo_df = gpd.read_file(
self.DATA_PATH / "census" / "geojson" / "us.json", self.DATA_PATH / "census" / "geojson" / "us.json",
) )
def transform(self) -> None:
df = self.df
# Here we join the geometry of the US to the dataframe so that we can impute
# The income of neighbors.
df = CensusACSETL.merge_geojson( df = CensusACSETL.merge_geojson(
df=df, df=df,
usa_geo_df=geo_df, usa_geo_df=self.geo_df,
) )
# Rename some fields. # Rename some fields.
@ -455,24 +576,57 @@ class CensusACSETL(ExtractTransformLoad):
# Calculate percent at different poverty thresholds # Calculate percent at different poverty thresholds
df[self.POVERTY_LESS_THAN_100_PERCENT_FPL_FIELD_NAME] = ( df[self.POVERTY_LESS_THAN_100_PERCENT_FPL_FIELD_NAME] = (
df["C17002_002E"] + df["C17002_003E"] df[self.POVERTY_UNDER_50PCT] + df[self.POVERTY_50PCT_TO_99PCT]
) / df["C17002_001E"] ) / df[self.POVERTY_DATASET_TOTAL]
df[self.POVERTY_LESS_THAN_150_PERCENT_FPL_FIELD_NAME] = ( df[self.POVERTY_LESS_THAN_150_PERCENT_FPL_FIELD_NAME] = (
df["C17002_002E"] df[self.POVERTY_UNDER_50PCT]
+ df["C17002_003E"] + df[self.POVERTY_50PCT_TO_99PCT]
+ df["C17002_004E"] + df[self.POVERTY_100PCT_TO_124PCT]
+ df["C17002_005E"] + df[self.POVERTY_125PCT_TO_149PCT]
) / df["C17002_001E"] ) / df[self.POVERTY_DATASET_TOTAL]
df[self.POVERTY_LESS_THAN_200_PERCENT_FPL_FIELD_NAME] = ( df[self.POVERTY_LESS_THAN_200_PERCENT_FPL_FIELD_NAME] = (
df["C17002_002E"] df[self.POVERTY_UNDER_50PCT]
+ df["C17002_003E"] + df[self.POVERTY_50PCT_TO_99PCT]
+ df["C17002_004E"] + df[self.POVERTY_100PCT_TO_124PCT]
+ df["C17002_005E"] + df[self.POVERTY_125PCT_TO_149PCT]
+ df["C17002_006E"] + df[self.POVERTY_150PCT_TO_184PCT]
+ df["C17002_007E"] + df[self.POVERTY_185PCT_TO_199PCT]
) / df["C17002_001E"] ) / df[self.POVERTY_DATASET_TOTAL]
# COUNT of Poverty less than 200%
df[self.POVERTY_LESS_THAN_200_PERCENT_FPL_COUNT_FIELD_NAME] = (
df[self.POVERTY_UNDER_50PCT]
+ df[self.POVERTY_50PCT_TO_99PCT]
+ df[self.POVERTY_100PCT_TO_124PCT]
+ df[self.POVERTY_125PCT_TO_149PCT]
+ df[self.POVERTY_150PCT_TO_184PCT]
+ df[self.POVERTY_185PCT_TO_199PCT]
)
df[self.POVERTY_LESS_THAN_100_PERCENT_FPL_COUNT_FIELD_NAME] = (
df[self.POVERTY_UNDER_50PCT] + df[self.POVERTY_50PCT_TO_99PCT]
)
# Off-Campus University Fields:
df[self.OFFCAMPUS_UNDERGRADUATE_POVERTY_FIELD] = df[
self.OFFCAMPUS_UNIVERSITY_BELOW_POVERTY_UNDERGRADUATE
]
df[self.OFFCAMPUS_UNDERGRADUATE_FIELD] = (
df[self.OFFCAMPUS_UNIVERSITY_BELOW_POVERTY_UNDERGRADUATE]
+ df[self.OFFCAMPUS_UNIVERSITY_ABOVE_POVERTY_UNDERGRADUATE]
)
df[self.OFFCAMPUS_UNIVERSITY_POVERTY_FIELD] = (
df[self.OFFCAMPUS_UNIVERSITY_BELOW_POVERTY_UNDERGRADUATE]
+ df[self.OFFCAMPUS_UNIVERSITY_BELOW_POVERTY_GRADUATE]
)
df[self.OFFCAMPUS_UNIVERSITY_FIELD] = (
df[self.OFFCAMPUS_UNIVERSITY_BELOW_POVERTY_UNDERGRADUATE]
+ df[self.OFFCAMPUS_UNIVERSITY_BELOW_POVERTY_GRADUATE]
+ df[self.OFFCAMPUS_UNIVERSITY_ABOVE_POVERTY_UNDERGRADUATE]
+ df[self.OFFCAMPUS_UNIVERSITY_ABOVE_POVERTY_GRADUATE]
)
# Calculate educational attainment # Calculate educational attainment
educational_numerator_fields = [ educational_numerator_fields = [
@ -596,16 +750,16 @@ class CensusACSETL(ExtractTransformLoad):
df[sum_columns].sum(axis=1) / df[field_names.TOTAL_POP_FIELD] df[sum_columns].sum(axis=1) / df[field_names.TOTAL_POP_FIELD]
) )
# Calculate college attendance and adjust low income # Calculate university attendance and adjust low income
df[self.COLLEGE_ATTENDANCE_FIELD] = ( df[self.UNIVERSITY_ATTENDANCE_FIELD] = (
df[self.COLLEGE_ATTENDANCE_MALE_ENROLLED_PUBLIC] df[self.UNIVERSITY_ATTENDANCE_MALE_ENROLLED_PUBLIC]
+ df[self.COLLEGE_ATTENDANCE_MALE_ENROLLED_PRIVATE] + df[self.UNIVERSITY_ATTENDANCE_MALE_ENROLLED_PRIVATE]
+ df[self.COLLEGE_ATTENDANCE_FEMALE_ENROLLED_PUBLIC] + df[self.UNIVERSITY_ATTENDANCE_FEMALE_ENROLLED_PUBLIC]
+ df[self.COLLEGE_ATTENDANCE_FEMALE_ENROLLED_PRIVATE] + df[self.UNIVERSITY_ATTENDANCE_FEMALE_ENROLLED_PRIVATE]
) / df[self.COLLEGE_ATTENDANCE_TOTAL_POPULATION_ASKED] ) / df[self.UNIVERSITY_ATTENDANCE_TOTAL_POPULATION_ASKED]
df[self.COLLEGE_NON_ATTENDANCE_FIELD] = ( df[self.UNIVERSITY_NON_ATTENDANCE_FIELD] = (
1 - df[self.COLLEGE_ATTENDANCE_FIELD] 1 - df[self.UNIVERSITY_ATTENDANCE_FIELD]
) )
# we impute income for both income measures # we impute income for both income measures
@ -618,8 +772,36 @@ class CensusACSETL(ExtractTransformLoad):
imputed_field_name=self.IMPUTED_POVERTY_LESS_THAN_200_PERCENT_FPL_FIELD_NAME, imputed_field_name=self.IMPUTED_POVERTY_LESS_THAN_200_PERCENT_FPL_FIELD_NAME,
), ),
CensusACSETL.ImputeVariables( CensusACSETL.ImputeVariables(
raw_field_name=self.COLLEGE_ATTENDANCE_FIELD, raw_field_name=self.POVERTY_LESS_THAN_200_PERCENT_FPL_COUNT_FIELD_NAME,
imputed_field_name=self.IMPUTED_COLLEGE_ATTENDANCE_FIELD, imputed_field_name=self.IMPUTED_POVERTY_LESS_THAN_200_PERCENT_FPL_COUNT_FIELD_NAME,
),
CensusACSETL.ImputeVariables(
raw_field_name=self.OFFCAMPUS_UNDERGRADUATE_POVERTY_FIELD,
imputed_field_name=self.IMPUTED_OFFCAMPUS_UNDERGRADUATE_POVERTY_FIELD,
),
CensusACSETL.ImputeVariables(
raw_field_name=self.OFFCAMPUS_UNDERGRADUATE_FIELD,
imputed_field_name=self.IMPUTED_OFFCAMPUS_UNDERGRADUATE_FIELD,
),
CensusACSETL.ImputeVariables(
raw_field_name=self.OFFCAMPUS_UNIVERSITY_POVERTY_FIELD,
imputed_field_name=self.IMPUTED_OFFCAMPUS_UNIVERSITY_POVERTY_FIELD,
),
CensusACSETL.ImputeVariables(
raw_field_name=self.OFFCAMPUS_UNIVERSITY_FIELD,
imputed_field_name=self.IMPUTED_OFFCAMPUS_UNIVERSITY_FIELD,
),
CensusACSETL.ImputeVariables(
raw_field_name=self.UNIVERSITY_ATTENDANCE_FIELD,
imputed_field_name=self.IMPUTED_UNIVERSITY_ATTENDANCE_FIELD,
),
CensusACSETL.ImputeVariables(
raw_field_name=self.POVERTY_DATASET_TOTAL,
imputed_field_name=self.IMPUTED_POVERTY_DATASET_TOTAL,
),
CensusACSETL.ImputeVariables(
raw_field_name=self.POVERTY_LESS_THAN_100_PERCENT_FPL_COUNT_FIELD_NAME,
imputed_field_name=self.IMPUTED_POVERTY_LESS_THAN_100_PERCENT_FPL_COUNT_FIELD_NAME,
), ),
], ],
geo_df=df, geo_df=df,
@ -629,21 +811,156 @@ class CensusACSETL(ExtractTransformLoad):
logger.debug("Calculating with imputed values") logger.debug("Calculating with imputed values")
# pylint: disable=pointless-string-statement
"""
POVERTY CALCULATION
Goal: Calculate the portion of people in households where income
is less than or equal to twice the federal poverty level,
not including students enrolled in higher ed.
Approach: To do this, we must make an adjustment to remove off-campus university students
from numbers reported by the ACS. We use the "interpolated" method to estimate
the number of off-campus university students actually included in the unadjusted numerator.
Interpolated Poverty Calculation, Step-by-Step Methodology
Step 1: Estimate ratio of overall population <200% FPL : overall population <100% FPL
Overall ratio 200:100 FPL =
max(
max[
Total population <200% FPL,
1
]
/
max[
Total population <100% FPL,
1
],
1)
Step 2: Interpolate the number of off-campus university students <200% FPL
Estimated university population <200% FPL =
min(
max[
University population <100% FPL x Overall ratio 200:100 FPL,
0 # nb: actual lower bound is the university population <100%, because ratio is clipped at 1
],
Total number of off-campus university students
)
Step 3: Subtract off-campus university students from both numerator and denominator of the unadjusted poverty rate
Adjusted poverty rate =
min(
max [
(
max[
Overall <200% FPL population - Estimated university population <200% FPL,
0
]
/
max[
Everyone in poverty dataset - University total population,
1
],
),
0
],
1
)
"""
# pylint: enable=pointless-string-statement
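# Illustrative worked example of the three steps above, using made-up numbers
# (not taken from ACS data): suppose a tract has 600 people <200% FPL, 300 people
# <100% FPL, 50 off-campus university students below the poverty line,
# 120 off-campus university students in total, and 1,000 people in the poverty dataset.
#   Step 1: ratio = max(600, 1) / max(300, 1) = 2.0 (already >= 1, so the final clip is a no-op).
#   Step 2: estimated university students <200% FPL = 50 * 2.0 = 100, clipped to [0, 120] -> 100.
#   Step 3: adjusted rate = max(600 - 100, 0) / max(1000 - 120, 1) = 500 / 880 (about 0.57), clipped to [0, 1].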
### Add fields for poverty calculation numerator
# Step 1: Estimate ratio of overall population <200% FPL : overall population <100% FPL
df[self.OVERALL_RATIO_200FPL_TO_100FPL] = (
df[self.POVERTY_LESS_THAN_200_PERCENT_FPL_COUNT_FIELD_NAME]
.fillna(
df[
self.IMPUTED_POVERTY_LESS_THAN_200_PERCENT_FPL_COUNT_FIELD_NAME
]
# Use clip for consistency with denominator
)
.clip(lower=1)
/ df[self.POVERTY_LESS_THAN_100_PERCENT_FPL_COUNT_FIELD_NAME]
.fillna(
df[
self.IMPUTED_POVERTY_LESS_THAN_100_PERCENT_FPL_COUNT_FIELD_NAME
]
# Use clip to ensure we never divide by 0
)
.clip(lower=1)
# Use clip to ensure that the ratio of poverty <200%:<100% is not lower than 1
).clip(lower=1)
# Step 2: Interpolate the number of off-campus university students <200% FPL
df[self.OFFCAMPUS_UNIVERSITY_POPULATION_COUNT_UNDER_200PCT_FPL] = (
df[self.OVERALL_RATIO_200FPL_TO_100FPL]
* (
df[
self.OFFCAMPUS_UNIVERSITY_POVERTY_FIELD
].fillna( # corresponds to <100% FPL
df[self.IMPUTED_OFFCAMPUS_UNIVERSITY_POVERTY_FIELD]
)
)
# ensure that estimated count of university <200% is between 0 and the total number of university students
# nb: actual lower bound is university <100%, because ratio is clipped at 1
).clip(
lower=0,
upper=df[self.OFFCAMPUS_UNIVERSITY_FIELD].fillna(
df[self.IMPUTED_OFFCAMPUS_UNIVERSITY_FIELD]
),
)
# Step 3a: Subtract off-campus university students from numerator of the unadjusted poverty rate
df[ df[
self.ADJUSTED_AND_IMPUTED_POVERTY_LESS_THAN_200_PERCENT_FPL_FIELD_NAME self.POPULATION_COUNT_UNDER_200PCT_FPL_MINUS_OFFCAMPUS_UNIVERSITY_ESTIMATE
] = ( ] = (
df[self.POVERTY_LESS_THAN_200_PERCENT_FPL_FIELD_NAME].fillna( df[self.POVERTY_LESS_THAN_200_PERCENT_FPL_COUNT_FIELD_NAME].fillna(
df[self.IMPUTED_POVERTY_LESS_THAN_200_PERCENT_FPL_FIELD_NAME] df[
self.IMPUTED_POVERTY_LESS_THAN_200_PERCENT_FPL_COUNT_FIELD_NAME
]
) )
- df[self.COLLEGE_ATTENDANCE_FIELD].fillna( - df[self.OFFCAMPUS_UNIVERSITY_POPULATION_COUNT_UNDER_200PCT_FPL]
df[self.IMPUTED_COLLEGE_ATTENDANCE_FIELD] # Use clip as extra precaution against values <=0
)
# Use clip to ensure that the values are not negative if college attendance
# is very high
).clip( ).clip(
lower=0 lower=0
) )
### Add denominator field for poverty calculation
# Step 3b: Subtract off-campus university students from denominator of the unadjusted poverty rate
df[
self.POPULATION_TOTAL_IN_POVERTY_DATASET_MINUS_OFFCAMPUS_UNVERSITY
] = (
df[self.POVERTY_DATASET_TOTAL].fillna(
df[self.IMPUTED_POVERTY_DATASET_TOTAL]
)
- df[self.OFFCAMPUS_UNIVERSITY_FIELD].fillna(
df[self.IMPUTED_OFFCAMPUS_UNIVERSITY_FIELD]
)
# Use clip as extra precaution against values <=0
).clip(
lower=1
)
# Step 3c: Put the numerator and denominator together to calculate the final adjusted poverty rate
# NB: numerator and denominator are both already imputed and clipped
df[
self.ADJUSTED_AND_IMPUTED_POVERTY_LESS_THAN_200_PERCENT_FPL_FIELD_NAME
] = (
df[
self.POPULATION_COUNT_UNDER_200PCT_FPL_MINUS_OFFCAMPUS_UNIVERSITY_ESTIMATE
]
/ df[
self.POPULATION_TOTAL_IN_POVERTY_DATASET_MINUS_OFFCAMPUS_UNVERSITY
]
# Clip to ensure percentage is between 0 and 1
).clip(
lower=0, upper=1
)
## CHECK OUTPUT AND SAVE RESULTS
# All values should have a value at this point # All values should have a value at this point
assert ( assert (
# For tracts with >0 population # For tracts with >0 population

File diff suppressed because one or more lines are too long

View file

@ -0,0 +1,25 @@
# How to generate the sample data in this folder
The sample data in this folder can be generated by debugging the `data_pipeline/etl/sources/census_acs/etl.py` file
and exporting data from the debugger console. Examples of these export commands are shown below.
## Why in pickle format?
Exporting as a pickle file keeps all the metadata about the columns, including the data types. If we exported as CSV, we would need
to encode the data types for every column in the test fixtures for the comparisons to be correct.
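As a quick, self-contained illustration (the frame, file names, and columns below are made up for this example and are not part of the pipeline), a pickle round trip preserves dtypes such as nullable integers and string GEOIDs, while a CSV round trip does not:
```python
import pandas as pd

# A tiny frame shaped loosely like the ACS fixtures: a string GEOID plus a nullable integer column.
df = pd.DataFrame(
    {
        "GEOID10_TRACT": pd.array(["01073001100"], dtype="string"),
        "C17002_001E": pd.array([1743], dtype="Int64"),
    }
)

df.to_pickle("example.pkl")
df.to_csv("example.csv", index=False)

print(pd.read_pickle("example.pkl").dtypes)  # string and Int64 dtypes are preserved
print(pd.read_csv("example.csv").dtypes)     # both columns come back as int64; the GEOID loses its leading zero
```
This is why the fixtures below are stored as `.pkl` rather than `.csv`.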
### Transform tests input files
- `acs_transform_input.pkl` - this file contains downloaded Census data that is used as input.
- `acs_transform_geojson.geojson` - this file contains the Census GeoJSON data that is used as input.
1. Place a breakpoint at the very beginning of the `transform` method in `data_pipeline/etl/sources/census_acs/etl.py`
and start the debugger by running the ETL command for Census ACS (`etl-run -d census_acs`).
1. Once the debugger pauses at the breakpoint, partially export the `self.df` and `self.geo_df` data to files. Use these
sample commands in the debugger console.
```python
t_list = ['01073001100', '01073001400', '01073002000', '01073003802', '01073004000']
self.geo_df[self.geo_df['GEOID10'].isin(t_list)].to_file('data_pipeline/tests/sources/census_acs/data/transform/acs_transform_geojson.geojson')
test_df = self.df[self.df['GEOID10_TRACT'].isin(t_list)].copy()
# Setting this one row to N/A allows the imputations code to succeed
test_df.at[4, self.OFFCAMPUS_UNIVERSITY_BELOW_POVERTY_UNDERGRADUATE] = pd.NA
test_df.to_pickle('data_pipeline/tests/sources/census_acs/data/transform/acs_transform_input.pkl')
```

View file

@ -0,0 +1,11 @@
{
"type": "FeatureCollection",
"crs": { "type": "name", "properties": { "name": "urn:ogc:def:crs:OGC:1.3:CRS84" } },
"features": [
{ "type": "Feature", "properties": { "STATEFP10": "01", "COUNTYFP10": "073", "TRACTCE10": "001400", "GEOID10": "01073001400", "NAME10": "14", "NAMELSAD10": "Census Tract 14", "MTFCC10": "G5020", "FUNCSTAT10": "S", "ALAND10": 2241287, "AWATER10": 0, "INTPTLAT10": "+33.5261497", "INTPTLON10": "-086.8351469" }, "geometry": { "type": "Polygon", "coordinates": [ [ [ -86.840884, 33.527586 ], [ -86.837824, 33.528871 ], [ -86.83693, 33.530023 ], [ -86.83679, 33.530205 ], [ -86.83639, 33.530805 ], [ -86.835352, 33.531179 ], [ -86.833843, 33.532003 ], [ -86.832035, 33.532595 ], [ -86.831245, 33.532854 ], [ -86.829135, 33.533924 ], [ -86.827029, 33.534708 ], [ -86.826575, 33.534968 ], [ -86.825634, 33.535508 ], [ -86.824369, 33.536271 ], [ -86.823926, 33.536515 ], [ -86.823758, 33.536606 ], [ -86.823683, 33.536184 ], [ -86.823591, 33.535542 ], [ -86.823596, 33.53486 ], [ -86.823637, 33.534404 ], [ -86.823739, 33.533793 ], [ -86.82384, 33.533389 ], [ -86.824083, 33.532657 ], [ -86.824469, 33.531855 ], [ -86.825589, 33.530001 ], [ -86.825945, 33.529412 ], [ -86.826278, 33.528861 ], [ -86.82662, 33.528272 ], [ -86.826703, 33.528081 ], [ -86.826785, 33.5278 ], [ -86.826911, 33.527375 ], [ -86.827001, 33.526685 ], [ -86.827025, 33.525543 ], [ -86.827091, 33.524614 ], [ -86.827086, 33.523811 ], [ -86.827086, 33.523767 ], [ -86.827091, 33.523677 ], [ -86.827125, 33.523051 ], [ -86.827117, 33.522607 ], [ -86.827088, 33.522283 ], [ -86.827006, 33.521836 ], [ -86.826987, 33.521701 ], [ -86.826807, 33.521107 ], [ -86.829003, 33.520829 ], [ -86.829532, 33.520498 ], [ -86.830151, 33.52015 ], [ -86.830507, 33.520037 ], [ -86.830666, 33.519986 ], [ -86.831415, 33.519802 ], [ -86.831968, 33.519782 ], [ -86.83261, 33.519835 ], [ -86.834123, 33.520139 ], [ -86.834279, 33.52017 ], [ -86.834582, 33.520223 ], [ -86.836148, 33.520497 ], [ -86.837263, 33.520682 ], [ -86.837708, 33.52073 ], [ -86.837983, 33.520761 ], [ -86.838571, 33.520791 ], [ -86.839295, 33.520792 ], [ -86.83942, 33.520792 ], [ -86.839596, 33.520792 ], [ -86.840901, 33.520808 ], [ -86.842445, 33.520826 ], [ -86.843989, 33.520846 ], [ -86.845617, 33.520865 ], [ -86.846194, 33.520873 ], [ -86.84942, 33.520908 ], [ -86.849421, 33.521133 ], [ -86.84944, 33.521596 ], [ -86.849499, 33.521692 ], [ -86.849433, 33.522393 ], [ -86.849582, 33.523085 ], [ -86.849667, 33.523435 ], [ -86.849748, 33.523804 ], [ -86.849783, 33.523978 ], [ -86.849812, 33.52412 ], [ -86.84985, 33.52431 ], [ -86.849984, 33.524884 ], [ -86.850106, 33.525442 ], [ -86.85023, 33.525973 ], [ -86.850303, 33.526624 ], [ -86.850312, 33.526707 ], [ -86.850132, 33.526701 ], [ -86.84979, 33.526605 ], [ -86.849721, 33.526576 ], [ -86.849042, 33.526293 ], [ -86.848434, 33.526035 ], [ -86.847961, 33.525834 ], [ -86.847891, 33.525805 ], [ -86.847787, 33.525787 ], [ -86.845591, 33.525405 ], [ -86.842991, 33.526505 ], [ -86.840884, 33.527586 ] ] ] } },
{ "type": "Feature", "properties": { "STATEFP10": "01", "COUNTYFP10": "073", "TRACTCE10": "001100", "GEOID10": "01073001100", "NAME10": "11", "NAMELSAD10": "Census Tract 11", "MTFCC10": "G5020", "FUNCSTAT10": "S", "ALAND10": 11428313, "AWATER10": 3862, "INTPTLAT10": "+33.5423337", "INTPTLON10": "-086.8765161" }, "geometry": { "type": "Polygon", "coordinates": [ [ [ -86.882435, 33.552326 ], [ -86.881871, 33.552684 ], [ -86.881177, 33.552987 ], [ -86.880549, 33.553159 ], [ -86.880024, 33.553247 ], [ -86.879772, 33.553268 ], [ -86.879039, 33.553269 ], [ -86.877437, 33.553193 ], [ -86.873225, 33.553055 ], [ -86.871055, 33.55294 ], [ -86.870488, 33.552953 ], [ -86.870028, 33.552996 ], [ -86.869495, 33.553081 ], [ -86.86902, 33.553198 ], [ -86.868549, 33.553418 ], [ -86.868064, 33.553783 ], [ -86.867614, 33.554299 ], [ -86.867234, 33.554811 ], [ -86.866179, 33.556229 ], [ -86.865886, 33.556531 ], [ -86.865428, 33.556909 ], [ -86.8651, 33.557135 ], [ -86.863343, 33.558141 ], [ -86.861725, 33.55911 ], [ -86.860721, 33.559702 ], [ -86.859731, 33.560316 ], [ -86.858524, 33.561035 ], [ -86.858002, 33.561372 ], [ -86.857324, 33.561719 ], [ -86.856471, 33.562004 ], [ -86.856129, 33.562064 ], [ -86.851595, 33.559404 ], [ -86.848891, 33.559805 ], [ -86.852991, 33.552104 ], [ -86.853223, 33.551239 ], [ -86.855191, 33.543905 ], [ -86.855694, 33.538151 ], [ -86.85606, 33.537889 ], [ -86.856269, 33.537719 ], [ -86.856472, 33.53752 ], [ -86.856779, 33.53719 ], [ -86.857174, 33.536763 ], [ -86.85758, 33.536346 ], [ -86.857795, 33.536127 ], [ -86.858137, 33.535851 ], [ -86.858444, 33.535632 ], [ -86.858851, 33.53539 ], [ -86.85921, 33.535203 ], [ -86.860817, 33.53405 ], [ -86.860936, 33.534028 ], [ -86.864589, 33.533319 ], [ -86.86658, 33.532927 ], [ -86.867357, 33.53278 ], [ -86.867868, 33.532651 ], [ -86.868475, 33.53246 ], [ -86.870603, 33.531691 ], [ -86.870854, 33.531601 ], [ -86.875846, 33.529909 ], [ -86.878638, 33.528917 ], [ -86.878921, 33.528817 ], [ -86.879007, 33.528787 ], [ -86.878999, 33.529822 ], [ -86.8796, 33.529851 ], [ -86.88017, 33.529879 ], [ -86.880793, 33.529909 ], [ -86.880872, 33.529919 ], [ -86.881324, 33.529935 ], [ -86.88209, 33.529947 ], [ -86.882375, 33.529953 ], [ -86.882779, 33.529958 ], [ -86.88349, 33.529994 ], [ -86.884213, 33.530005 ], [ -86.885074, 33.530017 ], [ -86.885649, 33.530026 ], [ -86.885884, 33.530034 ], [ -86.886535, 33.530037 ], [ -86.887886, 33.530039 ], [ -86.887968, 33.530043 ], [ -86.889407, 33.530064 ], [ -86.889415, 33.529614 ], [ -86.889422, 33.529159 ], [ -86.890877, 33.529167 ], [ -86.891569, 33.52917 ], [ -86.891643, 33.52917 ], [ -86.891832, 33.528964 ], [ -86.892208, 33.528556 ], [ -86.893389, 33.527586 ], [ -86.894533, 33.526558 ], [ -86.897192, 33.528505 ], [ -86.896992, 33.530504 ], [ -86.901104, 33.532548 ], [ -86.903492, 33.533105 ], [ -86.908792, 33.535505 ], [ -86.910592, 33.536105 ], [ -86.909292, 33.538305 ], [ -86.909392, 33.539005 ], [ -86.908162, 33.539693 ], [ -86.908095, 33.539893 ], [ -86.907692, 33.540605 ], [ -86.907192, 33.542205 ], [ -86.905392, 33.541305 ], [ -86.903103, 33.542516 ], [ -86.901996, 33.543221 ], [ -86.901608, 33.543456 ], [ -86.901167, 33.54412 ], [ -86.900913, 33.544384 ], [ -86.899614, 33.545658 ], [ -86.899456, 33.545794 ], [ -86.896838, 33.547888 ], [ -86.896206, 33.548394 ], [ -86.895817, 33.548705 ], [ -86.895692, 33.548805 ], [ -86.894818, 33.548368 ], [ -86.894292, 33.548105 ], [ -86.893192, 33.548205 ], [ -86.893149, 33.548341 ], [ -86.892544, 33.550241 ], [ -86.892492, 33.550404 ], [ -86.889392, 33.550505 
], [ -86.888392, 33.549304 ], [ -86.886592, 33.550205 ], [ -86.884997, 33.550205 ], [ -86.8849, 33.551105 ], [ -86.883737, 33.551726 ], [ -86.883615, 33.551775 ], [ -86.883463, 33.551713 ], [ -86.883374, 33.551707 ], [ -86.883263, 33.551733 ], [ -86.883102, 33.551828 ], [ -86.882615, 33.552213 ], [ -86.882435, 33.552326 ] ] ] } },
{ "type": "Feature", "properties": { "STATEFP10": "01", "COUNTYFP10": "073", "TRACTCE10": "002000", "GEOID10": "01073002000", "NAME10": "20", "NAMELSAD10": "Census Tract 20", "MTFCC10": "G5020", "FUNCSTAT10": "S", "ALAND10": 3605025, "AWATER10": 0, "INTPTLAT10": "+33.5591908", "INTPTLON10": "-086.7233518" }, "geometry": { "type": "Polygon", "coordinates": [ [ [ -86.713902, 33.539302 ], [ -86.714347, 33.539121 ], [ -86.714768, 33.539008 ], [ -86.715254, 33.538933 ], [ -86.715891, 33.538922 ], [ -86.716497, 33.538825 ], [ -86.716288, 33.539004 ], [ -86.719691, 33.540211 ], [ -86.720436, 33.543861 ], [ -86.72065, 33.544904 ], [ -86.72064, 33.545552 ], [ -86.720624, 33.545854 ], [ -86.720641, 33.546602 ], [ -86.720666, 33.547704 ], [ -86.720671, 33.548361 ], [ -86.720685, 33.549495 ], [ -86.720702, 33.550497 ], [ -86.720715, 33.551287 ], [ -86.720184, 33.551274 ], [ -86.720099, 33.551271 ], [ -86.719672, 33.551264 ], [ -86.719527, 33.551266 ], [ -86.718937, 33.55126 ], [ -86.718379, 33.551245 ], [ -86.717792, 33.551257 ], [ -86.717277, 33.551242 ], [ -86.716713, 33.551233 ], [ -86.716142, 33.551223 ], [ -86.715577, 33.551214 ], [ -86.715006, 33.5512 ], [ -86.714442, 33.551191 ], [ -86.713877, 33.551181 ], [ -86.713881, 33.552963 ], [ -86.713323, 33.552953 ], [ -86.712752, 33.552939 ], [ -86.71275, 33.553417 ], [ -86.712753, 33.554307 ], [ -86.713324, 33.554333 ], [ -86.713888, 33.554326 ], [ -86.714462, 33.55435 ], [ -86.71507, 33.554366 ], [ -86.715588, 33.554385 ], [ -86.71612, 33.554384 ], [ -86.716664, 33.554437 ], [ -86.717222, 33.554653 ], [ -86.717741, 33.554882 ], [ -86.71829, 33.55513 ], [ -86.718989, 33.555448 ], [ -86.719231, 33.555565 ], [ -86.719603, 33.555718 ], [ -86.72003, 33.555922 ], [ -86.720245, 33.556011 ], [ -86.720876, 33.556274 ], [ -86.721511, 33.556456 ], [ -86.7218, 33.556658 ], [ -86.722005, 33.556801 ], [ -86.722554, 33.557118 ], [ -86.723614, 33.556025 ], [ -86.723496, 33.555943 ], [ -86.723105, 33.55567 ], [ -86.72257, 33.555315 ], [ -86.723206, 33.554639 ], [ -86.723346, 33.554398 ], [ -86.72355, 33.554328 ], [ -86.723773, 33.55433 ], [ -86.724149, 33.554569 ], [ -86.724693, 33.554905 ], [ -86.725189, 33.555287 ], [ -86.725711, 33.555637 ], [ -86.726233, 33.555992 ], [ -86.726803, 33.556395 ], [ -86.727325, 33.556739 ], [ -86.727847, 33.557094 ], [ -86.728377, 33.557456 ], [ -86.728891, 33.557804 ], [ -86.729406, 33.558156 ], [ -86.729942, 33.55852 ], [ -86.730464, 33.558875 ], [ -86.731032, 33.559263 ], [ -86.729985, 33.560323 ], [ -86.728915, 33.561436 ], [ -86.728813, 33.56155 ], [ -86.727701, 33.563268 ], [ -86.726796, 33.564729 ], [ -86.72602, 33.565979 ], [ -86.725629, 33.56581 ], [ -86.725297, 33.565678 ], [ -86.725149, 33.565606 ], [ -86.724716, 33.565426 ], [ -86.724148, 33.565125 ], [ -86.72354, 33.564846 ], [ -86.722958, 33.56459 ], [ -86.722363, 33.564322 ], [ -86.721769, 33.564055 ], [ -86.721173, 33.563787 ], [ -86.720598, 33.563519 ], [ -86.719989, 33.563246 ], [ -86.719394, 33.562973 ], [ -86.718812, 33.562717 ], [ -86.718355, 33.56251 ], [ -86.718218, 33.562438 ], [ -86.717693, 33.562221 ], [ -86.717621, 33.562149 ], [ -86.717117, 33.56196 ], [ -86.716665, 33.56175 ], [ -86.716194, 33.561544 ], [ -86.715728, 33.561336 ], [ -86.715252, 33.56112 ], [ -86.714781, 33.560924 ], [ -86.714634, 33.561079 ], [ -86.714179, 33.560881 ], [ -86.712726, 33.560236 ], [ -86.712584, 33.560436 ], [ -86.710637, 33.559646 ], [ -86.709203, 33.559076 ], [ -86.708861, 33.558979 ], [ -86.708777, 33.558949 ], [ -86.707958, 33.558616 ], [ -86.707954, 33.558597 ], [ 
-86.707908, 33.558385 ], [ -86.707745, 33.558329 ], [ -86.70031, 33.555477 ], [ -86.700624, 33.554907 ], [ -86.701888, 33.552604 ], [ -86.702782, 33.55136 ], [ -86.703551, 33.550291 ], [ -86.703873, 33.549843 ], [ -86.704188, 33.549404 ], [ -86.707152, 33.54685 ], [ -86.707466, 33.546616 ], [ -86.707683, 33.546405 ], [ -86.707985, 33.545992 ], [ -86.708486, 33.54538 ], [ -86.709318, 33.544365 ], [ -86.709708, 33.543917 ], [ -86.712316, 33.541302 ], [ -86.712613, 33.541023 ], [ -86.712812, 33.540799 ], [ -86.713745, 33.539422 ], [ -86.713902, 33.539302 ] ] ] } },
{ "type": "Feature", "properties": { "STATEFP10": "01", "COUNTYFP10": "073", "TRACTCE10": "004000", "GEOID10": "01073004000", "NAME10": "40", "NAMELSAD10": "Census Tract 40", "MTFCC10": "G5020", "FUNCSTAT10": "S", "ALAND10": 2364675, "AWATER10": 0, "INTPTLAT10": "+33.4953245", "INTPTLON10": "-086.8516236" }, "geometry": { "type": "Polygon", "coordinates": [ [ [ -86.854631, 33.487542 ], [ -86.855535, 33.486967 ], [ -86.856538, 33.486322 ], [ -86.857046, 33.485979 ], [ -86.859584, 33.484308 ], [ -86.860059, 33.484021 ], [ -86.86053, 33.483714 ], [ -86.860755, 33.484005 ], [ -86.861035, 33.484352 ], [ -86.861374, 33.484803 ], [ -86.861571, 33.485078 ], [ -86.861742, 33.485286 ], [ -86.862107, 33.48576 ], [ -86.862507, 33.486283 ], [ -86.862427, 33.486486 ], [ -86.861595, 33.487797 ], [ -86.861383, 33.488126 ], [ -86.861291, 33.488236 ], [ -86.861169, 33.488332 ], [ -86.860211, 33.488939 ], [ -86.859816, 33.489189 ], [ -86.859266, 33.489539 ], [ -86.85833, 33.490136 ], [ -86.857567, 33.490628 ], [ -86.856969, 33.491013 ], [ -86.857431, 33.491536 ], [ -86.857855, 33.492015 ], [ -86.858305, 33.492492 ], [ -86.858765, 33.492994 ], [ -86.859221, 33.493495 ], [ -86.859706, 33.494023 ], [ -86.860181, 33.494549 ], [ -86.860593, 33.49501 ], [ -86.860637, 33.495047 ], [ -86.861002, 33.495451 ], [ -86.861354, 33.495838 ], [ -86.861804, 33.495554 ], [ -86.861962, 33.495725 ], [ -86.862138, 33.495924 ], [ -86.862286, 33.496091 ], [ -86.862342, 33.496145 ], [ -86.862401, 33.496183 ], [ -86.86248, 33.496203 ], [ -86.862523, 33.496203 ], [ -86.862657, 33.496178 ], [ -86.862747, 33.496129 ], [ -86.865424, 33.494422 ], [ -86.865495, 33.494501 ], [ -86.865766, 33.494783 ], [ -86.86583, 33.49488 ], [ -86.865848, 33.494895 ], [ -86.865876, 33.494905 ], [ -86.8659, 33.494906 ], [ -86.865968, 33.494895 ], [ -86.866332, 33.494779 ], [ -86.866881, 33.494588 ], [ -86.866885, 33.495165 ], [ -86.866882, 33.495367 ], [ -86.862491, 33.497006 ], [ -86.860903, 33.497855 ], [ -86.856845, 33.500199 ], [ -86.854735, 33.500131 ], [ -86.853969, 33.50011 ], [ -86.852812, 33.500079 ], [ -86.851686, 33.500051 ], [ -86.851194, 33.500034 ], [ -86.850371, 33.500006 ], [ -86.849077, 33.499969 ], [ -86.848377, 33.499947 ], [ -86.848378, 33.500321 ], [ -86.848378, 33.500493 ], [ -86.848378, 33.500678 ], [ -86.84838, 33.50083 ], [ -86.848379, 33.501047 ], [ -86.848371, 33.501387 ], [ -86.848002, 33.501458 ], [ -86.847752, 33.50152 ], [ -86.847574, 33.501633 ], [ -86.847191, 33.501852 ], [ -86.847057, 33.501953 ], [ -86.847392, 33.502364 ], [ -86.847897, 33.502965 ], [ -86.848046, 33.503141 ], [ -86.848257, 33.503394 ], [ -86.848336, 33.503486 ], [ -86.848157, 33.503554 ], [ -86.847219, 33.503858 ], [ -86.846292, 33.504089 ], [ -86.843699, 33.504587 ], [ -86.843203, 33.504334 ], [ -86.843156, 33.504274 ], [ -86.842845, 33.503874 ], [ -86.842556, 33.503511 ], [ -86.842525, 33.503473 ], [ -86.842194, 33.503061 ], [ -86.841884, 33.502679 ], [ -86.841545, 33.50226 ], [ -86.84134, 33.502009 ], [ -86.841222, 33.501863 ], [ -86.840803, 33.50135 ], [ -86.840537, 33.501444 ], [ -86.840278, 33.501514 ], [ -86.840164, 33.501534 ], [ -86.839567, 33.50155 ], [ -86.839587, 33.500984 ], [ -86.839566, 33.500861 ], [ -86.8395, 33.500695 ], [ -86.839428, 33.500655 ], [ -86.839565, 33.500597 ], [ -86.839661, 33.500445 ], [ -86.839589, 33.500419 ], [ -86.839586, 33.499992 ], [ -86.839591, 33.499212 ], [ -86.83959, 33.498823 ], [ -86.839596, 33.498071 ], [ -86.839592, 33.497372 ], [ -86.839593, 33.496633 ], [ -86.840423, 33.496212 ], [ -86.841274, 33.495781 ], 
[ -86.842156, 33.495302 ], [ -86.843044, 33.494824 ], [ -86.844948, 33.493727 ], [ -86.846142, 33.493001 ], [ -86.84733, 33.492275 ], [ -86.849147, 33.49107 ], [ -86.85242, 33.488964 ], [ -86.85329, 33.488405 ], [ -86.853724, 33.488135 ], [ -86.854431, 33.48767 ], [ -86.854631, 33.487542 ] ] ] } },
{ "type": "Feature", "properties": { "STATEFP10": "01", "COUNTYFP10": "073", "TRACTCE10": "003802", "GEOID10": "01073003802", "NAME10": "38.02", "NAMELSAD10": "Census Tract 38.02", "MTFCC10": "G5020", "FUNCSTAT10": "S", "ALAND10": 3245083, "AWATER10": 0, "INTPTLAT10": "+33.4785702", "INTPTLON10": "-086.8900020" }, "geometry": { "type": "Polygon", "coordinates": [ [ [ -86.903171, 33.471768 ], [ -86.902839, 33.472127 ], [ -86.902225, 33.47278 ], [ -86.901954, 33.473078 ], [ -86.901641, 33.473416 ], [ -86.90141, 33.473658 ], [ -86.900796, 33.474323 ], [ -86.900673, 33.474449 ], [ -86.898775, 33.47648 ], [ -86.898283, 33.477004 ], [ -86.897626, 33.477708 ], [ -86.896539, 33.478865 ], [ -86.896263, 33.479165 ], [ -86.895093, 33.48043 ], [ -86.894581, 33.480994 ], [ -86.894456, 33.481132 ], [ -86.894021, 33.481637 ], [ -86.893961, 33.48169 ], [ -86.893382, 33.482321 ], [ -86.893258, 33.482453 ], [ -86.892746, 33.482886 ], [ -86.892337, 33.483302 ], [ -86.891263, 33.484422 ], [ -86.889966, 33.485827 ], [ -86.888649, 33.487226 ], [ -86.887767, 33.488186 ], [ -86.887057, 33.488954 ], [ -86.886728, 33.489294 ], [ -86.886305, 33.489638 ], [ -86.885219, 33.490482 ], [ -86.884374, 33.491114 ], [ -86.883539, 33.491751 ], [ -86.882698, 33.492396 ], [ -86.881861, 33.493024 ], [ -86.881309, 33.49345 ], [ -86.881062, 33.49364 ], [ -86.880805, 33.493833 ], [ -86.880411, 33.494131 ], [ -86.88002, 33.494431 ], [ -86.879626, 33.494732 ], [ -86.878975, 33.495227 ], [ -86.878498, 33.494588 ], [ -86.878186, 33.494157 ], [ -86.87756, 33.493268 ], [ -86.877271, 33.49286 ], [ -86.876969, 33.492421 ], [ -86.876655, 33.491999 ], [ -86.87634, 33.491571 ], [ -86.876027, 33.491146 ], [ -86.875717, 33.49072 ], [ -86.875757, 33.490693 ], [ -86.876011, 33.49057 ], [ -86.876337, 33.490403 ], [ -86.876675, 33.490237 ], [ -86.877239, 33.489948 ], [ -86.877115, 33.489769 ], [ -86.87693, 33.489514 ], [ -86.876672, 33.48915 ], [ -86.876629, 33.48908 ], [ -86.876559, 33.488988 ], [ -86.87702, 33.488297 ], [ -86.878015, 33.486803 ], [ -86.878888, 33.485874 ], [ -86.879365, 33.485176 ], [ -86.879779, 33.484583 ], [ -86.880179, 33.484014 ], [ -86.880814, 33.483217 ], [ -86.884791, 33.478206 ], [ -86.889113, 33.472352 ], [ -86.889455, 33.47189 ], [ -86.890077, 33.471066 ], [ -86.89052, 33.47046 ], [ -86.890747, 33.470166 ], [ -86.891276, 33.469468 ], [ -86.891699, 33.468898 ], [ -86.892196, 33.4683 ], [ -86.895724, 33.464729 ], [ -86.897917, 33.46251 ], [ -86.899914, 33.460456 ], [ -86.90018, 33.460195 ], [ -86.900652, 33.459741 ], [ -86.900614, 33.460009 ], [ -86.900652, 33.460414 ], [ -86.900791, 33.461207 ], [ -86.900777, 33.463217 ], [ -86.900792, 33.464061 ], [ -86.900791, 33.464861 ], [ -86.900368, 33.466247 ], [ -86.900487, 33.467212 ], [ -86.900642, 33.468474 ], [ -86.900602, 33.469456 ], [ -86.900561, 33.471007 ], [ -86.900554, 33.471353 ], [ -86.900515, 33.471735 ], [ -86.902983, 33.471766 ], [ -86.903171, 33.471768 ] ] ] } }
]
}

View file

@@ -0,0 +1,119 @@
import pytest
import pandas as pd
import geopandas as gpd
from pathlib import Path
from data_pipeline.etl.sources.census_acs.etl import CensusACSETL


def _check_fields_exist(df: pd.DataFrame, field_names: list):
    for field in field_names:
        assert field in df.columns


@pytest.fixture
def transform_census_input_fixture() -> pd.DataFrame:
    """
    Load the Census input data for the transform method tests.

    Returns:
        DataFrame: the input data
    """
    file = (
        Path(__file__).parents[0]
        / "data"
        / "transform"
        / "acs_transform_input.pkl"
    )
    return pd.read_pickle(file)


@pytest.fixture
def transform_census_geojson_fixture() -> gpd.GeoDataFrame:
    """
    Load the Census GeoJSON input data for the transform method.

    Returns:
        GeoDataFrame: the Census GeoJSON input data
    """
    file = (
        Path(__file__).parents[0]
        / "data"
        / "transform"
        / "acs_transform_geojson.geojson"
    )
    return gpd.read_file(file)


@pytest.fixture
def transformed_data_fixture(
    transform_census_input_fixture: pd.DataFrame,
    transform_census_geojson_fixture: gpd.GeoDataFrame,
) -> pd.DataFrame:
    """
    Transform the test input data.

    Returns:
        DataFrame: the transformed data
    """
    acs = CensusACSETL()
    acs.df = transform_census_input_fixture
    acs.geo_df = transform_census_geojson_fixture
    acs.transform()
    return acs.output_df


#################
# Transform tests
#################
def test_poverty_fields(transformed_data_fixture: pd.DataFrame):
    result = transformed_data_fixture

    # Test that the poverty count fields were added.
    acs = CensusACSETL()
    fields_to_test = [
        acs.POVERTY_LESS_THAN_200_PERCENT_FPL_COUNT_FIELD_NAME,
        acs.POVERTY_LESS_THAN_100_PERCENT_FPL_COUNT_FIELD_NAME,
    ]
    _check_fields_exist(result, fields_to_test)

    assert (
        result.iloc[0][acs.POVERTY_LESS_THAN_200_PERCENT_FPL_COUNT_FIELD_NAME]
        == 1743
    )
    assert (
        result.iloc[0][acs.POVERTY_LESS_THAN_100_PERCENT_FPL_COUNT_FIELD_NAME]
        == 700
    )
    assert (
        result.iloc[1][acs.POVERTY_LESS_THAN_200_PERCENT_FPL_COUNT_FIELD_NAME]
        == 941
    )
    assert (
        result.iloc[1][acs.POVERTY_LESS_THAN_100_PERCENT_FPL_COUNT_FIELD_NAME]
        == 548
    )


def test_college_undergrad_fields(transformed_data_fixture: pd.DataFrame):
    result = transformed_data_fixture

    # Test that the off-campus undergraduate and university fields were added.
    acs = CensusACSETL()
    fields_to_test = [
        acs.OFFCAMPUS_UNDERGRADUATE_POVERTY_FIELD,
        acs.OFFCAMPUS_UNDERGRADUATE_FIELD,
        acs.OFFCAMPUS_UNIVERSITY_POVERTY_FIELD,
        acs.OFFCAMPUS_UNIVERSITY_FIELD,
    ]
    _check_fields_exist(result, fields_to_test)

    assert result.iloc[0][acs.OFFCAMPUS_UNDERGRADUATE_POVERTY_FIELD] == 0
    assert result.iloc[0][acs.OFFCAMPUS_UNDERGRADUATE_FIELD] == 296
    assert result.iloc[0][acs.OFFCAMPUS_UNIVERSITY_POVERTY_FIELD] == 44
    assert result.iloc[0][acs.OFFCAMPUS_UNIVERSITY_FIELD] == 340
    assert result.iloc[1][acs.OFFCAMPUS_UNDERGRADUATE_POVERTY_FIELD] == 45
    assert result.iloc[1][acs.OFFCAMPUS_UNDERGRADUATE_FIELD] == 97
    assert result.iloc[1][acs.OFFCAMPUS_UNIVERSITY_POVERTY_FIELD] == 45
    assert result.iloc[1][acs.OFFCAMPUS_UNIVERSITY_FIELD] == 128

View file

@@ -80,7 +80,7 @@ def generate_tiles(data_path: Path, generate_tribal_layer: bool) -> None: @@ -80,7 +80,7 @@ def generate_tiles(data_path: Path, generate_tribal_layer: bool) -> None:
USA_TRIBAL_MAX_ZOOM = 11 USA_TRIBAL_MAX_ZOOM = 11
tribal_tiles_path = data_path / "tribal" / "tiles" tribal_tiles_path = data_path / "tribal" / "tiles"
tribal_geojson_dir = data_path / "tribal" / "geojson" tribal_geojson_dir = data_path / "tribal" / "geographic_data"
# remove existing mbtiles file # remove existing mbtiles file
remove_all_from_dir(tribal_tiles_path) remove_all_from_dir(tribal_tiles_path)

View file

@@ -0,0 +1,184 @@
# Artifacts
The sections below show how the `data/data-pipeline/data_pipeline/data` directory evolves as you run each workflow step in sequence.
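As a rough guide, the whole sequence can be driven from the `data/data-pipeline` directory roughly as follows. This is a minimal sketch, not part of the pipeline; it assumes `poetry` and the `data_pipeline` dependencies are already installed, and it simply shells out to the same CLI commands the section headings name.

```python
# Minimal sketch (not part of the repo): run the workflow steps below in order,
# stopping at the first failure. Each command is the data_pipeline CLI command
# the sections are named after.
import subprocess

STEPS = [
    "etl-run",
    "score-run",
    "generate-score-post",
    "geo-score",
    "generate-map-tiles",
    "etl-run --dataset tribal",
    "generate-map-tiles --generate-tribal-layer",
]

for step in STEPS:
    cmd = ["poetry", "run", "python3", "-m", "data_pipeline.application", *step.split()]
    print("==>", " ".join(cmd))
    subprocess.run(cmd, check=True)  # raises CalledProcessError if a step fails
```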
## 1. `etl-run`
## 2. `score-run`
```diff
.
├── score
│   ├── csv
+ │   │   └── full
+ │   │       └── usa.csv
│   ├── downloadable
│   ├── geojson
│   ├── shapefile
│   └── tiles
└── tribal
├── csv
+ ├── geographic_data
+ │   └── usa.json
├── geojson
└── tiles
```
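After `score-run`, the full score lands at `score/csv/full/usa.csv` (see the tree above). A quick, hedged way to sanity-check it; the directory root and the choice to read GEOIDs as strings are assumptions, not pipeline code:

```python
# Hedged sketch: peek at the full score CSV written by `score-run`.
# DATA_DIR is assumed to be the directory named at the top of this page,
# relative to the repository root.
from pathlib import Path

import pandas as pd

DATA_DIR = Path("data/data-pipeline/data_pipeline/data")

# Read everything as strings so tract GEOIDs keep their leading zeros.
usa = pd.read_csv(DATA_DIR / "score" / "csv" / "full" / "usa.csv", dtype=str)
print(usa.shape)
print(list(usa.columns)[:5])
```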
## 3. `generate-score-post`
```diff
.
├── score
│   ├── csv
+ │   │   ├── full
+ │   │   │   ├── usa_counties.csv
│   │   │   └── usa.csv
+ │   │   └── tiles
+ │   │       ├── tile_indexes.json
│   │       └── usa.csv
│   ├── downloadable
+ │   │   ├── beta-codebook.csv
+ │   │   ├── beta-communities.csv
+ │   │   ├── beta-communities-csv.zip
+ │   │   ├── beta-communities.xlsx
+ │   │   ├── beta-communities-xls.zip
+ │   │   └── beta-data-documentation.zip
│   ├── geojson
+ │   ├── search
+ │   │   └── tracts.json
│   ├── shapefile
│   └── tiles
└── tribal
├── csv
├── geographic_data
│   └── usa.json
├── geojson
└── tiles
```
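Among the step 3 outputs is `score/search/tracts.json`. A hedged sketch for inspecting it, without assuming anything about its internal layout beyond "it parses as JSON":

```python
# Hedged sketch: load the search file emitted by `generate-score-post` and
# report its top-level type and size. DATA_DIR is an assumption, as above.
import json
from pathlib import Path

DATA_DIR = Path("data/data-pipeline/data_pipeline/data")

with open(DATA_DIR / "score" / "search" / "tracts.json", encoding="utf-8") as f:
    tracts = json.load(f)

print(type(tracts).__name__, len(tracts))
```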
## 4. `geo-score`
```diff
.
├── score
│   ├── csv
│   │   ├── full
│   │   │   ├── usa_counties.csv
│   │   │   └── usa.csv
│   │   └── tiles
│   │       ├── tile_indexes.json
│   │       └── usa.csv
│   ├── downloadable
│   │   ├── beta-codebook.csv
│   │   ├── beta-communities.csv
│   │   ├── beta-communities-csv.zip
│   │   ├── beta-communities.xlsx
│   │   ├── beta-communities-xls.zip
│   │   ├── beta-data-documentation.zip
+ │   │   └── beta-shapefile-codebook.zip
│   ├── geojson
+ │   │   ├── usa-high.json
+ │   │   └── usa-low.json
│   ├── search
│   │   └── tracts.json
│   ├── shapefile
+ │   │   ├── usa.cpg
+ │   │   ├── usa.dbf
+ │   │   ├── usa.prj
+ │   │   ├── usa.shp
+ │   │   ├── usa.shx
+ │   │   └── usa.zip
│   └── tiles
└── tribal
├── csv
├── geographic_data
│   └── usa.json
├── geojson
└── tiles
```
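Step 4 adds the high- and low-detail GeoJSON plus the shapefile. Since `geopandas` is already used elsewhere in this changeset, a hedged way to eyeball the low-detail file (paths come from the tree above; `DATA_DIR` is an assumption):

```python
# Hedged sketch: load the low-detail GeoJSON written by `geo-score` and print
# basic facts about it (feature count, CRS, geometry types).
from pathlib import Path

import geopandas as gpd

DATA_DIR = Path("data/data-pipeline/data_pipeline/data")

gdf = gpd.read_file(DATA_DIR / "score" / "geojson" / "usa-low.json")
print(len(gdf), gdf.crs)
print(gdf.geometry.geom_type.value_counts())
```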
## 5. `generate-map-tiles`
```diff
.
├── score
│   ├── ...
│   └── tiles
+ │   ├── high
+ │   │   ├── 5
+ │   │   │   └── 0
+ │   │   │       ├── ...
+ │   │   │       └── 17.pbf
+ │   │   ├── ...
+ │   │   ├── 11
+ │   │   │   ├── 4
+ │   │   │   │   ├── 679.pbf
+ │   │   │   │   ├── ...
+ │   │   │   │   └── 684.pbf
+ │   │   │   ├── ...
+ │   │   │   └── 2047
+ │   │   │       ├── 676.pbf
+ │   │   │       └── 677.pbf
+ │   │   ├── metadata.json
+ │   │   └── usa_high.mbtiles
+ │   └── low
+ │       ├── 0
+ │       │   └── 0
+ │       │       └── 0.pbf
+ │       ├── ...
+ │       ├── 7
+ │       │   ├── ...
+ │       │   ├── 40
+ │       │   │   ├── ...
+ │       │   │   └── 57.pbf
+ │       │   └── 41
+ │       │       └── 57.pbf
+ │       ├── metadata.json
+ │       └── usa_low.mbtiles
└── tribal
└── ...
```
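The tiles above follow the usual `{zoom}/{x}/{y}.pbf` slippy-map layout, split into a `high` and a `low` detail set. A hedged helper for locating one tile on disk (`tile_path` and `TILES_DIR` are illustrative names, not pipeline functions):

```python
# Hedged sketch: resolve one vector tile in the {zoom}/{x}/{y}.pbf layout shown
# above. The "high"/"low" split and the base directory come from the tree.
from pathlib import Path

TILES_DIR = Path("data/data-pipeline/data_pipeline/data/score/tiles")

def tile_path(detail: str, zoom: int, x: int, y: int) -> Path:
    """Return the on-disk path of a single tile ("high" or "low" detail)."""
    return TILES_DIR / detail / str(zoom) / str(x) / f"{y}.pbf"

print(tile_path("high", 11, 4, 679))  # one of the tiles listed above
```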
## 6. `etl-run --dataset tribal`
```diff
.
├── score
│   └── ...
└── tribal
├── csv
├── geographic_data
│   └── usa.json
├── geojson
└── tiles
+ └── usa.mbtiles
```
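The tribal `usa.mbtiles` produced here is, like any MBTiles file, a SQLite database, so its key/value `metadata` table can be inspected with the standard library alone. A hedged sketch (the path is taken from the tree above):

```python
# Hedged sketch: an MBTiles file is a SQLite database with a `metadata` table,
# so the tribal tileset can be inspected without extra tooling.
import sqlite3
from pathlib import Path

MBTILES = Path("data/data-pipeline/data_pipeline/data/tribal/tiles/usa.mbtiles")

conn = sqlite3.connect(str(MBTILES))
try:
    for name, value in conn.execute("SELECT name, value FROM metadata"):
        print(f"{name}: {value}")
finally:
    conn.close()
```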
## 7. `generate-map-tiles --generate-tribal-layer`
```diff
.
├── score
│   └── ...
└── tribal
├── csv
├── geographic_data
│   └── usa.json
├── geojson
└── tiles
+ ├── 0
+ │   └── 0
+ │       └── 0.pbf
+ ├── 1
+ │   └── 0
+ │       └── 0.pbf
+ ├── 11
+ │   ├── 32
+ │   │   └── 674.pbf
+ │   ├── ...
+ │   └── 642
+ │      ├── 736.pbf
+ │      └── 737.pbf
+ ├── metadata.json
└── usa.mbtiles
```