Mirror of https://github.com/DOI-DO/j40-cejst-2.git (synced 2025-02-21 09:11:26 -08:00)

Merge pull request #80 from agilesix/cfelix/merge-v2-20250122
CEQ-J40 merge v2 code - 20250122

Commit a2779cb7a6
44 changed files with 2446 additions and 826 deletions
.github/workflows/codeql-analysis.yml (vendored): 2 changes

@@ -25,7 +25,7 @@ on:
 jobs:
   analyze:
     name: Analyze
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-22.04
     permissions:
       actions: read
       contents: read
.github/workflows/compile_mermaid.yml (vendored): 2 changes

@@ -7,7 +7,7 @@ on:
 jobs:
   build:
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-22.04

     steps:
       - name: Checkout Project
.github/workflows/deploy_backend_main.yml (vendored): 2 changes

@@ -14,7 +14,7 @@ env:
 jobs:
   generate-score-tiles:
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-22.04
     defaults:
       run:
         working-directory: data/data-pipeline
.github/workflows/deploy_frontend_main.yml (vendored): 2 changes

@@ -9,7 +9,7 @@ concurrency:
   cancel-in-progress: true
 jobs:
   build:
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-22.04
     environment: Staging
     defaults:
       run:
.github/workflows/ping-check.yml (vendored): 2 changes

@@ -15,7 +15,7 @@ on:
       description: 'Ping Check'
 jobs:
   check_site_uptime:
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-22.04
     environment: Staging
     name: Ping the site
     steps:
.github/workflows/pr_backend.yml (vendored): 5 changes

@@ -11,7 +11,7 @@ jobs:
   # JOB to run change detection
   detect-be-changes:
     name: Detect backend changes
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-22.04
     # Required permissions
     permissions:
       pull-requests: read

@@ -130,8 +130,7 @@
       - name: Generate Score Geo
         run: |
           poetry run python3 -m data_pipeline.application geo-score
-      - name: Run smoketest for 1.0
-        if: ${{ env.J40_VERSION_LABEL_STRING == '1.0' }}
+      - name: Run smoketests
         run: |
           poetry run pytest data_pipeline/ -m smoketest
       - name: Set timezone for tippecanoe
.github/workflows/pr_frontend.yml (vendored): 2 changes

@@ -10,7 +10,7 @@ jobs:
   # JOB to run change detection
   detect-fe-changes:
     name: Detect frontend changes
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-22.04
     # Required permissions
     permissions:
       pull-requests: read
@@ -1,4 +1,6 @@
 dirs:
   - .
 ignorePatterns:
-  - pattern: '^http://localhost.*$'
+  - pattern: '^http://localhost.*$'
+excludedFiles:
+  - ./DATASETS.md
DATASETS.md: 70 changes

@@ -1,38 +1,38 @@
 # Justice40 Datasets

-Below is a table of all datasets that feed the CEJST application, including access links and contacts.
+Below is a table of all datasets that feed the CEJST application, including access links, contacts, and update information.

| **Indicator Group** | **Indicator** | **Description** | **Notes** | **Publisher** | **Year(s)** | **Source** | **Geography** | **Geographies available** | **Can be updated to 2020 Census Tracts?** | **Contact** | **Current Data Download** |
| ------------------------- | -------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------- | -------------------------------------- | ---------------------------------------------------- | ------------------------- | ------------------------------------------------------------------------------------------------------ | --------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
| **Climate Change** | Expected Agriculture Loss Rate | Economic loss to agricultural value resulting from natural hazards each year | | Federal Emergency Management Agency | 2014-2021 | National Risk Index | US & District of Columbia | 2010 Census Tract | Data to be released End of March 2023 | Caset Zuzak, NHRAP Senior Risk Analyst. (Casey.Zuzak@fema.dhs.gov); Karen Villatoro (karen.villatoro@fema.dhs.gov); Jesse Rozelle (Jesse.Rozelle@fema.dhs.gov); Sean McNabb (Sean.McNabb@fema.dhs.gov); Charles Carson (charles.carson@fema.dhs.gov) | https://hazards.fema.gov/nri/data-resources |
| **Climate Change** | Expected Building Loss Rate | Economic loss to building value resulting from natural hazards each year | | Federal Emergency Management Agency | 2014-2021 | National Risk Index | US & District of Columbia | 2010 Census Tract | Data to be released End of March 2023 | Caset Zuzak, NHRAP Senior Risk Analyst. (Casey.Zuzak@fema.dhs.gov); Karen Villatoro (karen.villatoro@fema.dhs.gov); Jesse Rozelle (Jesse.Rozelle@fema.dhs.gov); Sean McNabb (Sean.McNabb@fema.dhs.gov); Charles Carson (charles.carson@fema.dhs.gov) | https://hazards.fema.gov/nri/data-resources |
| **Climate Change** | Expected Population Loss Rate | fatalities and injuries resulting from natural hazards each year | this burden only applies for census tracts with populations greater than 20 people. | Federal Emergency Management Agency | 2014-2021 | National Risk Index | US & District of Columbia | 2010 Census Tract | Data to be released End of March 2023 | Caset Zuzak, NHRAP Senior Risk Analyst. (Casey.Zuzak@fema.dhs.gov); Karen Villatoro (karen.villatoro@fema.dhs.gov); Jesse Rozelle (Jesse.Rozelle@fema.dhs.gov); Sean McNabb (Sean.McNabb@fema.dhs.gov); Charles Carson (charles.carson@fema.dhs.gov) | https://hazards.fema.gov/nri/data-resources |
| **Climate Change** | Projected Flood Risk | projected risk to properties from projected floods, from tides, raid, riverine and storm surges within 30 years | these were emailed to J40 initially | First Street Foundation | projecting 2022-2052. Released in 2020 | | 50 states, DC, PR | 2010 Census Tract (we think, but documentation does not say) | Updated is available | Ed Kearns, Chief Data Officer of First Street Foundation. (ed@firststreet.org) | https://aws.amazon.com/marketplace/pp/prodview-r36lzzzjacd32?sr=0-1&ref_=beagle&applicationId=AWSMPContessa#overview |
| **Climate Change** | Projected Wildfire Risk | projected risk to properties form wildfire from fire fuels, weather, humans, and fire movement in 30 years | these were emailed to J40 initially | First Street Foundation | projecting 2022-2052. Released in 2020 | | 50 states, DC, PR | 2010 Census Tract (we think, but documentation does not say) | Updated is available | Ed Kearns, Chief Data Officer of First Street Foundation. (ed@firststreet.org) | https://aws.amazon.com/marketplace/pp/prodview-r36lzzzjacd32?sr=0-1&ref_=beagle&applicationId=AWSMPContessa#overview |
| **Energy** | Energy Cost | Average annual energy costs divided by household income | | DOE | 2018 | LEAD Tool | 50 states, DC, PR | Census 2010 | Yes, in March 2023 | Aaron Vimont, developer at National Renewable Energy Laboratory. (aaron.vimont@nrel.gov); Toy Reames (tony.reames@hq.doe.gov) | "https://data.openei.org/submissions/573 https://www.energy.gov/scep/slsc/lead-tool |
| **Energy** | PM2.5 in the air | level of inhalable particles, 2.5 micrometers or smaller | | Environmental Protection Agency (EPA) Office of Air and Radiation (OAR) | 2017 | EJ Screen | 50 states, DC + Islands | Census 2010 & Census 2020 | Yes | | https://gaftp.epa.gov/ejscreen/ |
| **Health** | Asthma | Share of people who have been told they have asthma | New Data Source: https://chronicdata.cdc.gov/500-Cities-Places/PLACES-Local-Data-for-Better-Health-Census-Tract-D/cwsq-ngmh | CDC | 2016-2019 | PLACES data | 50 States + DC | Census 2010 Tracts, Census 2010 & 2020 Counties | Not until December 2024 | T.J. Pierce (pwc2@cdc.gov); Sharunda Buchanan (sdb4@cdc.gov); Andrew Dent (aed5@cdc.gov) Angela Werner (myo6@cdc.gov) | https://chronicdata.cdc.gov/500-Cities-Places/PLACES-Census-Tract-Data-GIS-Friendly-Format-2021-/mb5y-ytti |
| **Health** | Diabetes | Share of people ages 18+ who have diabetes other than diabetes during pregnancy | New Data Source: https://chronicdata.cdc.gov/500-Cities-Places/PLACES-Local-Data-for-Better-Health-Census-Tract-D/cwsq-ngmh | CDC | 2016-2019 | PLACES data | 50 States + DC | Census 2010 Tracts, Census 2010 & 2020 Counties | Not until December 2024 | T.J. Pierce (pwc2@cdc.gov); Sharunda Buchanan (sdb4@cdc.gov); Andrew Dent (aed5@cdc.gov) Angela Werner (myo6@cdc.gov) | https://chronicdata.cdc.gov/500-Cities-Places/PLACES-Census-Tract-Data-GIS-Friendly-Format-2021-/mb5y-ytti |
| **Health** | Heart Disease | Share of people ages 18+ who have been told they have heart disease | New Data Source: https://chronicdata.cdc.gov/500-Cities-Places/PLACES-Local-Data-for-Better-Health-Census-Tract-D/cwsq-ngmh | CDC | 2016-2019 | PLACES data | 50 States + DC | Census 2010 Tracts, Census 2010 & 2020 Counties | Not until December 2024 | T.J. Pierce (pwc2@cdc.gov); Sharunda Buchanan (sdb4@cdc.gov); Andrew Dent (aed5@cdc.gov) Angela Werner (myo6@cdc.gov) | https://chronicdata.cdc.gov/500-Cities-Places/PLACES-Census-Tract-Data-GIS-Friendly-Format-2021-/mb5y-ytti |
| **Health** | Low life expectancy | Average number of years a person can expect to live | | CDC | 2010-2015 | US Small Area Life Expectancy Estimates Project | 50 States + DC | Census 2010 | 2025 | Elizabeth Arias (efa3@cdc.gov) | https://www.cdc.gov/nchs/nvss/usaleep/usaleep.html#life-expectancy |
| **Housing** | Historic Underinvestment | Census tracts that experienced historic underinvestment based on redlining maps created by the federal government’s Home Owners’ Loan Corporation (HOLC) between 1935 and 1940. | | National Community Reinvestment Coalition (NCRC) | | Home Owners Loan Corporation | 50 States + DC | Census 2010 & 2020 | Yes | | https://www.openicpsr.org/openicpsr/project/141121/version/V2/view |
| **Housing** | Housing Cost | Share of households making less than 80% of the AMI and spending more than 30% of income on housing | maybe could be found in ACS | Department of Housing and Urban Development (HUD) | 2014-2018 | Comprehensive housing affordability strategy dataset | 50 States + DC+ PR | Census 2010 | Early Summer 2023 | Blair Russell, Office of Policy Development and Research; HUD (Blair.D.Russell@hud.gov) | https://www.huduser.gov/portal/datasets/cp.html#2006-2019_data |
| **Housing** | Lack of Green Space | Amount of land, not including crop land, that is covered with artificial materials like concrete or pavement | | Multi-Resolution Land Characteristics Consortium | 2019 | National Land Cover Database (USGS) | 48 States + DC | Possibly not bound to geographies because its raster data. TPL imputed to census 2010 for us, I think. | Maybe? Use same data but pre process to Census 2020 | | Was provided by the trust for public land but you can also get it here as image data https://www.sciencebase.gov/catalog/item/5f21cef582cef313ed940043 |
| **Housing** | Lack of Indoor Plumbing | Share of homes without indoor kitchens or plumbing | maybe could be found in ACS | Department of Housing and Urban Development (HUD) | 2014-2018 | Comprehensive housing affordability strategy dataset | 50 States + DC + PR | Census 2010 | Early Summer 2023 | Blair Russell, Office of Policy Development and Research; HUD (Blair.D.Russell@hud.gov) | https://www.huduser.gov/portal/datasets/cp.html#2006-2019_data |
| **Housing** | Lead paint | Share of homes that are likely to have lead paint | Share of homes built before 1960, which indicates potential lead paint exposure. Tracts with extremely high home values (i.e. median home values above the 90th percentile) that are less likely to face health risks from lead paint exposure are not included. | US Census | 2015-2019 | American Community Survey | 50 States + DC.+ PR | Census 2010, Census 2020 | Yes, can update to ACS 2017-2021 | |
| **Legacy Pollution** | Abandoned Mine Land | Presence of one or more abandoned min land within the tract | | Department of the Interior, Office of Surface Mining Reclamation and Enforcement | 2017 | Abandoned Mine Land Inventory System | 50 States + DC | Point Data | Yes, points can be mapped to any geography | | https://www.osmre.gov/programs/e-amlis |
| **Legacy Pollution** | Formerly used Defense Site | Presence of one or more formerly used defense site within the tract | | US Army Corps of Engineers | 2019 | Formerly Used Defense Sites | 50 States + DC | Point Data | Yes, points can be mapped to any geography | | https://www.usace.army.mil/Missions/Environmental/Formerly-Used-Defense-Sites/ |
| **Legacy Pollution** | Proximity to Hazardous Waste Facilities | count of hazardous waste facilities within 5 km | | EPA | 2020 | EJ Screen | 50 states, DC + Islands | Census 2010 & Census 2020 | Yes | | https://gaftp.epa.gov/ejscreen/ |
| **Legacy Pollution** | Proximity to Risk Management Plan Facilities | count of risk management plan facilities within 5 kilometers | | EPA | 2020 | EJ Screen | 50 states, DC + Islands | Census 2010 & Census 2020 | Yes | | https://gaftp.epa.gov/ejscreen/ |
| **Legacy Pollution** | Proximity to Superfund Sites | count of proposed or listed superfund or national priorities list sites within 5 km | | EPA | 2021 | EJ Screen | 50 states, DC + Islands | Census 2010 & Census 2020 | Yes | | https://gaftp.epa.gov/ejscreen/ |
| **Transportation** | Diesel particulate matter exposure | amount of diesel exhaust in the air | | EPA | 2014 | EJ Screen | 50 states, DC + Islands | Census 2010 & Census 2020 | Yes | | https://gaftp.epa.gov/ejscreen/ |
| **Transportation** | transportation barriers | average of relative cost and time spent on transportation | | DOT | 2022 | Transportation Access Disadvantage | 50 States + DC | Census 2020 | Yes | | https://www.transportation.gov/equity-Justice40#:~:text=Transportation%20access%20disadvantage%20identifies%20communities%20and%20places%20that%20spend%20more%2C%20and%20take%20longer%2C%20to%20get%20where%20they%20need%20to%20go.%20(4) |
| **Transportation** | traffic proximity and volume | count of vehicles at major roads within 500 meters | | DOT (via EPA) | 2017 | EJ Screen | 50 states, DC + Islands | Census 2010 & Census 2020 | Yes | | https://gaftp.epa.gov/ejscreen/ |
| **Water and Wastewater** | underground storage tanks and releases | formula of the density of leaking underground storage tanks and number of all active underground storage tanks within 1500 feet of the census tract boundaries | | EPA /UST Finder | 2021 | EJ Screen | 50 states, DC + Islands | Census 2010 & Census 2020 | Yes | | https://gaftp.epa.gov/ejscreen/ |
| **Water and Wastewater** | wastewater discharge | modeled toxic concentrations at parts of streams within 500 meters | | EPA Risk Screening Environmental Indicators | 2020 | EJ Screen | 50 states, DC + Islands | Census 2010 & Census 2020 | Yes | | https://gaftp.epa.gov/ejscreen/ |
| **Workforce Development** | Linguistic isolation | Share of households where no one over age 14 speaks English very well | | US Census | 2015-2019 | American Community Survey | 50 States + DC.+ PR | Census 2010, Census 2020 | Yes, can update to ACS 2017-2021 | |
| **Workforce Development** | low median income | comparison of median income in the tract to the median incomes in the area | | US Census | 2015-2019 | American Community Survey | 50 States + DC.+ PR | Census 2010, Census 2020 | Yes, can update to ACS 2017-2021 | |
| **Workforce Development** | poverty | share of people in households where income is at or below 100% of the federal poverty level | | US Census | 2015-2019 | American Community Survey | 50 States + DC.+ PR | Census 2010, Census 2020 | Yes, can update to ACS 2017-2021 | |
| **Workforce Development** | Unemployment | number of unemployed people as a part of the labor force | | US Census | 2015-2019 | American Community Survey | 50 States + DC.+ PR | Census 2010, Census 2020 | Yes, can update to ACS 2017-2021 | |
| **Workforce Development** | High school Education | Percent of people ages 25 or older whose high school education is less than a high school diploma | | US Census | 2015-2019 | American Community Survey | 50 States + DC.+ PR | Census 2010, Census 2020 | Yes, can update to ACS 2017-2021 | |
| **Multiple Factors** | Low income | People in households where income is less than or equal to twice the federal poverty level, not including students enrolled in higher ed | | US Census | 2015-2019 | American Community Survey | 50 States + DC.+ PR | Census 2010, Census 2020 | Yes, can update to ACS 2017-2021 | |
| **Indicator Group** | **Indicator** | **Description** | **Notes** | **Publisher** | **Year(s)** | **Source** | **Geography** | **Geographies available** | **Can be updated to 2020 Census Tracts?** | **How to update** | **Contact** | **Current Data Download** | **Updated Data Download** |
| ------------------------- | -------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------- | -------------------------------------- | ---------------------------------------------------- | ------------------------- | ------------------------------------------------------------------------------------------------------ | --------------------------------------------------- | -------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
| **Climate Change** | Expected Agriculture Loss Rate | Economic loss to agricultural value resulting from natural hazards each year | Field names are set in [datasets.yml](data/data-pipeline/data_pipeline/etl/score/config/datasets.yml). | Federal Emergency Management Agency | 2014-2021 | National Risk Index | US & District of Columbia | 2010 Census Tract | Yes | Update init function with new NRI_Table_CensusTracts.zip references in S3 & originating hazards.fema.gov download URL. | Caset Zuzak, NHRAP Senior Risk Analyst. (Casey.Zuzak@fema.dhs.gov); Karen Villatoro (karen.villatoro@fema.dhs.gov); Jesse Rozelle (Jesse.Rozelle@fema.dhs.gov); Sean McNabb (Sean.McNabb@fema.dhs.gov); Charles Carson (charles.carson@fema.dhs.gov) | https://hazards.fema.gov/nri/data-resources | https://hazards.fema.gov/nri/data-resources |
| **Climate Change** | Expected Building Loss Rate | Economic loss to building value resulting from natural hazards each year | Field names are set in [datasets.yml](data/data-pipeline/data_pipeline/etl/score/config/datasets.yml). | Federal Emergency Management Agency | 2014-2021 | National Risk Index | US & District of Columbia | 2010 Census Tract | Yes | Update init function with new NRI_Table_CensusTracts.zip references in S3 & originating hazards.fema.gov download URL. | Caset Zuzak, NHRAP Senior Risk Analyst. (Casey.Zuzak@fema.dhs.gov); Karen Villatoro (karen.villatoro@fema.dhs.gov); Jesse Rozelle (Jesse.Rozelle@fema.dhs.gov); Sean McNabb (Sean.McNabb@fema.dhs.gov); Charles Carson (charles.carson@fema.dhs.gov) | https://hazards.fema.gov/nri/data-resources | https://hazards.fema.gov/nri/data-resources |
| **Climate Change** | Expected Population Loss Rate | fatalities and injuries resulting from natural hazards each year | This burden only applies for census tracts with populations greater than 20 people. Field names are set in [datasets.yml](data/data-pipeline/data_pipeline/etl/score/config/datasets.yml). | Federal Emergency Management Agency | 2014-2021 | National Risk Index | US & District of Columbia | 2010 Census Tract | Yes | Update init function with new NRI_Table_CensusTracts.zip references in S3 & originating hazards.fema.gov download URL. | Caset Zuzak, NHRAP Senior Risk Analyst. (Casey.Zuzak@fema.dhs.gov); Karen Villatoro (karen.villatoro@fema.dhs.gov); Jesse Rozelle (Jesse.Rozelle@fema.dhs.gov); Sean McNabb (Sean.McNabb@fema.dhs.gov); Charles Carson (charles.carson@fema.dhs.gov) | https://hazards.fema.gov/nri/data-resources | https://hazards.fema.gov/nri/data-resources |
| **Climate Change** | Projected Flood Risk | projected risk to properties from projected floods, from tides, raid, riverine and storm surges within 30 years | these were emailed to J40 initially. | First Street Foundation | projecting 2022-2052. Released in 2020 | | 50 states, DC, PR | 2010 Census Tract (we think, but documentation does not say) | Updated data is available | Request updated fsf_flood.zip from FSF, or potentially programmatically prepare an equivalent file using their API. Stage resulting assets in S3 and update init reference. | Ed Kearns, Chief Data Officer of First Street Foundation. (ed@firststreet.org) | https://aws.amazon.com/marketplace/pp/prodview-r36lzzzjacd32?sr=0-1&ref_=beagle&applicationId=AWSMPContessa#overview | Can request again through email, or try to use proprietary [API](https://docs.google.com/spreadsheets/d/1_MWAVl6IHvWupuPtzdvo8Mu2Z7o3IM-FsWreweS9Mpc/edit?gid=0#gid=0). |
| **Climate Change** | Projected Wildfire Risk | projected risk to properties form wildfire from fire fuels, weather, humans, and fire movement in 30 years | these were emailed to J40 initially | First Street Foundation | projecting 2022-2052. Released in 2020 | | 50 states, DC, PR | 2010 Census Tract (we think, but documentation does not say) | Updated data is available | Request updated fsf_flood.zip from FSF, or potentially programmatically prepare an equivalent file using their API. Stage resulting assets in S3 and update init reference. | Ed Kearns, Chief Data Officer of First Street Foundation. (ed@firststreet.org) | https://aws.amazon.com/marketplace/pp/prodview-r36lzzzjacd32?sr=0-1&ref_=beagle&applicationId=AWSMPContessa#overview | Can request again through email, or try to use proprietary [API](https://docs.google.com/spreadsheets/d/1_MWAVl6IHvWupuPtzdvo8Mu2Z7o3IM-FsWreweS9Mpc/edit?gid=0#gid=0). |
| **Energy** | Energy Cost | Average annual energy costs divided by household income | | DOE | 2018 | LEAD Tool | 50 states, DC, PR | Census 2010 | Yes, in March 2023 | To-Do | Aaron Vimont, developer at National Renewable Energy Laboratory. (aaron.vimont@nrel.gov); Toy Reames (tony.reames@hq.doe.gov) | https://data.openei.org/submissions/573 https://www.energy.gov/scep/slsc/lead-tool | To-Do |
| **Energy** | PM2.5 in the air | level of inhalable particles, 2.5 micrometers or smaller | The "LOWINCPCT" column gets renamed to "Poverty (Less than 200% of federal poverty line)" in L93. We should look at if/how that gets used since the logic we worked on lives in the census_acs ETL. | Environmental Protection Agency (EPA) Office of Air and Radiation (OAR) | 2017 | EJ Screen | 50 states, DC + Islands | Census 2010 & Census 2020 | Yes | Update init function with new EJSCREEN_202X_USPR_Tracts.csv.zip file reference. There isn't a S3 reference specified in the function, so that's something we could potentially stage to pattern match most of the other etl files. | | https://gaftp.epa.gov/ejscreen/ | https://gaftp.epa.gov/ejscreen/ |
| **Health** | Asthma | Share of people who have been told they have asthma | The updated PLACES dataset is available. | CDC | 2016-2019 | PLACES data | 50 States + DC | Census 2010 Tracts, Census 2010 & 2020 Counties | Yes | Update init function with new PLACES__Local_Data_for_Better_Health__Census_Tract_Data_202X_release.csv references, including (1) the updated file staged in S3 and (2) the originating chronicdata.cdc.gov download URL. | T.J. Pierce (pwc2@cdc.gov); Sharunda Buchanan (sdb4@cdc.gov); Andrew Dent (aed5@cdc.gov) Angela Werner (myo6@cdc.gov) | https://chronicdata.cdc.gov/500-Cities-Places/PLACES-Census-Tract-Data-GIS-Friendly-Format-2021-/mb5y-ytti | https://chronicdata.cdc.gov/500-Cities-Places/PLACES-Local-Data-for-Better-Health-Census-Tract-D/cwsq-ngmh |
| **Health** | Diabetes | Share of people ages 18+ who have diabetes other than diabetes during pregnancy | The updated PLACES dataset is available. | CDC | 2016-2019 | PLACES data | 50 States + DC | Census 2010 Tracts, Census 2010 & 2020 Counties | Yes | Update init function with new PLACES__Local_Data_for_Better_Health__Census_Tract_Data_202X_release.csv references, including (1) the updated file staged in S3 and (2) the originating chronicdata.cdc.gov download URL. | T.J. Pierce (pwc2@cdc.gov); Sharunda Buchanan (sdb4@cdc.gov); Andrew Dent (aed5@cdc.gov) Angela Werner (myo6@cdc.gov) | https://chronicdata.cdc.gov/500-Cities-Places/PLACES-Census-Tract-Data-GIS-Friendly-Format-2021-/mb5y-ytti | https://chronicdata.cdc.gov/500-Cities-Places/PLACES-Local-Data-for-Better-Health-Census-Tract-D/cwsq-ngmh |
| **Health** | Heart Disease | Share of people ages 18+ who have been told they have heart disease | The updated PLACES dataset is available. | CDC | 2016-2019 | PLACES data | 50 States + DC | Census 2010 Tracts, Census 2010 & 2020 Counties | Yes | Update init function with new PLACES__Local_Data_for_Better_Health__Census_Tract_Data_202X_release.csv references, including (1) the updated file staged in S3 and (2) the originating chronicdata.cdc.gov download URL. | T.J. Pierce (pwc2@cdc.gov); Sharunda Buchanan (sdb4@cdc.gov); Andrew Dent (aed5@cdc.gov) Angela Werner (myo6@cdc.gov) | https://chronicdata.cdc.gov/500-Cities-Places/PLACES-Census-Tract-Data-GIS-Friendly-Format-2021-/mb5y-ytti | https://chronicdata.cdc.gov/500-Cities-Places/PLACES-Local-Data-for-Better-Health-Census-Tract-D/cwsq-ngmh |
| **Health** | Low life expectancy | Average number of years a person can expect to live | | CDC | 2010-2015 | US Small Area Life Expectancy Estimates Project | 50 States + DC | Census 2010 | Update coming 2025 | Update init function once data becomes available. | Elizabeth Arias (efa3@cdc.gov) | https://www.cdc.gov/nchs/nvss/usaleep/usaleep.html#life-expectancy | To-Do |
| **Housing** | Historic Underinvestment | Census tracts that experienced historic underinvestment based on redlining maps created by the federal government’s Home Owners’ Loan Corporation (HOLC) between 1935 and 1940. | | National Community Reinvestment Coalition (NCRC) | | Home Owners Loan Corporation | 50 States + DC | Census 2010 & 2020 | Yes | Update init function with new HRS_2020.xlsx reference in S3 | | https://www.openicpsr.org/openicpsr/project/141121/version/V2/view | https://www.openicpsr.org/openicpsr/project/141121/version/V2/view |
| **Housing** | Housing Cost | Share of households making less than 80% of the AMI and spending more than 30% of income on housing | Maybe could be found in ACS? Also: There is a note about [suppressed fields](https://www.huduser.gov/portal/datasets/cp.html) in the updated datasets relative to pre-2018 data. The impacted categories come from tables that do not appear to be used in the ETL, therefore no additional changes in the pipeline should be necessary. | Department of Housing and Urban Development (HUD) | 2014-2018 | Comprehensive housing affordability strategy dataset | 50 States + DC+ PR | Census 2010 | Yes | Update init function with new 140.csv zipped file references, including (1) the updated file staged in S3 and (2) the originating huduser.gov download URL. | Blair Russell, Office of Policy Development and Research; HUD (Blair.D.Russell@hud.gov) | https://www.huduser.gov/portal/datasets/cp.html#2006-2019_data | https://www.huduser.gov/portal/datasets/cp.html#data_2006-2021 |
| **Housing** | Lack of Green Space | Amount of land, not including crop land, that is covered with artificial materials like concrete or pavement | | Multi-Resolution Land Characteristics Consortium | 2019 | National Land Cover Database (USGS) | 48 States + DC | Possibly not bound to geographies because its raster data. TPL imputed to census 2010 for us, I think. | Maybe? Use same data but pre process to Census 2020 | To-Do | Was provided by the trust for public land but you can also get it here as image data https://www.sciencebase.gov/catalog/item/5f21cef582cef313ed940043 | |
| **Housing** | Lack of Indoor Plumbing | Share of homes without indoor kitchens or plumbing | Maybe could be found in ACS? Also: There is a note about [suppressed fields](https://www.huduser.gov/portal/datasets/cp.html) in the updated datasets relative to pre-2018 data. The impacted categories come from tables that do not appear to be used in the ETL, therefore no additional changes in the pipeline should be necessary. | Department of Housing and Urban Development (HUD) | 2014-2018 | Comprehensive housing affordability strategy dataset | 50 States + DC + PR | Census 2010 | Yes | Update init function with new 140.csv zipped file references, including (1) the updated file staged in S3 and (2) the originating huduser.gov download URL. | Blair Russell, Office of Policy Development and Research; HUD (Blair.D.Russell@hud.gov) | https://www.huduser.gov/portal/datasets/cp.html#2006-2019_data | https://www.huduser.gov/portal/datasets/cp.html#data_2006-2021 |
| **Housing** | Lead paint | Share of homes that are likely to have lead paint | Share of homes built before 1960, which indicates potential lead paint exposure. Tracts with extremely high home values (i.e. median home values above the 90th percentile) that are less likely to face health risks from lead paint exposure are not included. | US Census | 2015-2019 | American Community Survey | 50 States + DC.+ PR | Census 2010, Census 2020 | Yes, can update to ACS 2017-2021. (Need to check whether newest releases will work for us.) | To-Do | | | |
| **Legacy Pollution** | Abandoned Mine Land | Presence of one or more abandoned min land within the tract | It looks like data is queried in the online GUI then exported. | Department of the Interior, Office of Surface Mining Reclamation and Enforcement | 2017 | Abandoned Mine Land Inventory System | 50 States + DC | Point Data | Yes, points can be mapped to any geography | Update init function with new "eAMLIS export of all data.tsv.zip" file reference in S3. | | https://www.osmre.gov/programs/e-amlis | https://amlis.osmre.gov/ |
| **Legacy Pollution** | Formerly used Defense Site | Presence of one or more formerly used defense site within the tract | | US Army Corps of Engineers | 2019 | Formerly Used Defense Sites | 50 States + DC | Point Data | Yes, points can be mapped to any geography | To-Do | | https://www.usace.army.mil/Missions/Environmental/Formerly-Used-Defense-Sites/ | To-Do |
| **Legacy Pollution** | Proximity to Hazardous Waste Facilities | count of hazardous waste facilities within 5 km | The "LOWINCPCT" column gets renamed to "Poverty (Less than 200% of federal poverty line)" in L93. We should look at if/how that gets used since the logic we worked on lives in the census_acs ETL. | EPA | 2020 | EJ Screen | 50 states, DC + Islands | Census 2010 & Census 2020 | Yes | Update init function with new EJSCREEN_202X_USPR_Tracts.csv.zip file reference. There isn't a S3 reference specified in the function, so that's something we could potentially stage to pattern match most of the other etl files. | | https://gaftp.epa.gov/ejscreen/ | https://gaftp.epa.gov/ejscreen/ |
| **Legacy Pollution** | Proximity to Risk Management Plan Facilities | count of risk management plan facilities within 5 kilometers | The "LOWINCPCT" column gets renamed to "Poverty (Less than 200% of federal poverty line)" in L93. We should look at if/how that gets used since the logic we worked on lives in the census_acs ETL. | EPA | 2020 | EJ Screen | 50 states, DC + Islands | Census 2010 & Census 2020 | Yes | Update init function with new EJSCREEN_202X_USPR_Tracts.csv.zip file reference. There isn't a S3 reference specified in the function, so that's something we could potentially stage to pattern match most of the other etl files. | | https://gaftp.epa.gov/ejscreen/ | https://gaftp.epa.gov/ejscreen/ |
| **Legacy Pollution** | Proximity to Superfund Sites | count of proposed or listed superfund or national priorities list sites within 5 km | The "LOWINCPCT" column gets renamed to "Poverty (Less than 200% of federal poverty line)" in L93. We should look at if/how that gets used since the logic we worked on lives in the census_acs ETL. | EPA | 2021 | EJ Screen | 50 states, DC + Islands | Census 2010 & Census 2020 | Yes | Update init function with new EJSCREEN_202X_USPR_Tracts.csv.zip file reference. There isn't a S3 reference specified in the function, so that's something we could potentially stage to pattern match most of the other etl files. | | https://gaftp.epa.gov/ejscreen/ | https://gaftp.epa.gov/ejscreen/ |
| **Transportation** | Diesel particulate matter exposure | amount of diesel exhaust in the air | The "LOWINCPCT" column gets renamed to "Poverty (Less than 200% of federal poverty line)" in L93. We should look at if/how that gets used since the logic we worked on lives in the census_acs ETL. | EPA | 2014 | EJ Screen | 50 states, DC + Islands | Census 2010 & Census 2020 | Yes | Update init function with new EJSCREEN_202X_USPR_Tracts.csv.zip file reference. There isn't a S3 reference specified in the function, so that's something we could potentially stage to pattern match most of the other etl files. | | https://gaftp.epa.gov/ejscreen/ | https://gaftp.epa.gov/ejscreen/ |
| **Transportation** | transportation barriers | average of relative cost and time spent on transportation | This is an example of when field naming does NOT come from [field_names.py](data/data-pipeline/data_pipeline/score/field_names.py). | DOT | 2022 | Transportation Access Disadvantage | 50 States + DC | Census 2020 | Yes | Update init function with new Shapefile_and_Metadata.zip file references. | | https://www.transportation.gov/equity-Justice40#:~:text=Transportation%20access%20disadvantage%20identifies%20communities%20and%20places%20that%20spend%20more%2C%20and%20take%20longer%2C%20to%20get%20where%20they%20need%20to%20go.%20(4) | https://www.transportation.gov/foia/foia-electronic-reading-room-category-four |
| **Transportation** | traffic proximity and volume | count of vehicles at major roads within 500 meters | The "LOWINCPCT" column gets renamed to "Poverty (Less than 200% of federal poverty line)" in L93. We should look at if/how that gets used since the logic we worked on lives in the census_acs ETL. | DOT (via EPA) | 2017 | EJ Screen | 50 states, DC + Islands | Census 2010 & Census 2020 | Yes | Update init function with new EJSCREEN_202X_USPR_Tracts.csv.zip file reference. There isn't a S3 reference specified in the function, so that's something we could potentially stage to pattern match most of the other etl files. | | https://gaftp.epa.gov/ejscreen/ | https://gaftp.epa.gov/ejscreen/ |
| **Water and Wastewater** | underground storage tanks and releases | formula of the density of leaking underground storage tanks and number of all active underground storage tanks within 1500 feet of the census tract boundaries | The "LOWINCPCT" column gets renamed to "Poverty (Less than 200% of federal poverty line)" in L93. We should look at if/how that gets used since the logic we worked on lives in the census_acs ETL. | EPA /UST Finder | 2021 | EJ Screen | 50 states, DC + Islands | Census 2010 & Census 2020 | Yes | Update init function with new EJSCREEN_202X_USPR_Tracts.csv.zip file reference. There isn't a S3 reference specified in the function, so that's something we could potentially stage to pattern match most of the other etl files. | | https://gaftp.epa.gov/ejscreen/ | https://gaftp.epa.gov/ejscreen/ |
| **Water and Wastewater** | wastewater discharge | modeled toxic concentrations at parts of streams within 500 meters | The "LOWINCPCT" column gets renamed to "Poverty (Less than 200% of federal poverty line)" in L93. We should look at if/how that gets used since the logic we worked on lives in the census_acs ETL. | EPA Risk Screening Environmental Indicators | 2020 | EJ Screen | 50 states, DC + Islands | Census 2010 & Census 2020 | Yes | Update init function with new EJSCREEN_202X_USPR_Tracts.csv.zip file reference. There isn't a S3 reference specified in the function, so that's something we could potentially stage to pattern match most of the other etl files. | | https://gaftp.epa.gov/ejscreen/ | https://gaftp.epa.gov/ejscreen/ |
| **Workforce Development** | Linguistic isolation | Share of households where no one over age 14 speaks English very well | | US Census | 2015-2019 | American Community Survey | 50 States + DC.+ PR | Census 2010, Census 2020 | Yes, can update to ACS 2017-2021. (Need to check whether newest releases will work for us.) | To-Do | | | |
| **Workforce Development** | low median income | comparison of median income in the tract to the median incomes in the area | | US Census | 2015-2019 | American Community Survey | 50 States + DC.+ PR | Census 2010, Census 2020 | Yes, can update to ACS 2017-2021. (Need to check whether newest releases will work for us.) | To-Do | | | |
| **Workforce Development** | poverty | share of people in households where income is at or below 100% of the federal poverty level | | US Census | 2015-2019 | American Community Survey | 50 States + DC.+ PR | Census 2010, Census 2020 | Yes, can update to ACS 2017-2021. (Need to check whether newest releases will work for us.) | To-Do | | | |
| **Workforce Development** | Unemployment | number of unemployed people as a part of the labor force | | US Census | 2015-2019 | American Community Survey | 50 States + DC.+ PR | Census 2010, Census 2020 | Yes, can update to ACS 2017-2021. (Need to check whether newest releases will work for us.) | To-Do | | | |
| **Workforce Development** | High school Education | Percent of people ages 25 or older whose high school education is less than a high school diploma | | US Census | 2015-2019 | American Community Survey | 50 States + DC.+ PR | Census 2010, Census 2020 | Yes, can update to ACS 2017-2021. (Need to check whether newest releases will work for us.) | To-Do | | | |
| **Multiple Factors** | Low income | People in households where income is less than or equal to twice the federal poverty level, not including students enrolled in higher ed | | US Census | 2015-2019 | American Community Survey | 50 States + DC.+ PR | Census 2010, Census 2020 | Yes, can update to ACS 2017-2021. (Need to check whether newest releases will work for us.) | To-Do | | | |
@@ -12,7 +12,7 @@ $ cd j40-cejst-2

 Install [`docker`](https://docs.docker.com/get-docker/). See [Install Docker](INSTALLATION.md#install-docker).

-> _Important_: To be able to run the entire application, you may need to increase the memory allocated for docker to at least 8096 MB. See [this post](https://stackoverflow.com/a/44533437) for more details.
+> _Important_: To be able to run the entire application, you may need to increase the memory allocated for docker to at least 10 GB. See [this post](https://stackoverflow.com/a/44533437) for more details. If you are using docker just to run the local web server and not the full data pipeline, then only 4 GB of memory is needed for docker.

 Use [`docker compose`](https://docs.docker.com/compose/) to run the full application:

@@ -25,20 +25,28 @@ The data pipeline container can run the entire data pipeline, or any individual

 The data server will make the files created by the data pipeline container available to the web server. The data pipeline container mounts the local repo directories to read and write files. The data server presents the local files to the web server to render the map and downloadable files.

-The web server will run the application website. After it initializes, you should be able to open the web server in your browser at [`http://localhost:8000`](http://localhost:8000). If the data pipeline container is set to run the full data pipeline, the website will not pick up the changes until the pipeline completes.
+The web server will run the application website. After it initializes, you should be able to open the web server in your browser at [`http://localhost:8000`](http://localhost:8000).

-In order for docker to pick up code changes, the images will need to be rebuilt. If there are code changes in the data folder, the data pipeline image should be rebuilt. If there are code changes in the client folder, the web server image should be rebuilt. The data server image should never have to be rebuilt.
+Previously, the web server became available immediately after launching docker compose, which meant the website could be up before the data pipeline finished scoring and creating the DAC map tiles. The setup has been changed so that the web server will not start until the data server has started, and the data server will not start until the data pipeline finishes whatever start-up command it was given.
+
+In order for docker to pick up code changes, the images will need to be rebuilt. If there are code changes in the data folder, the data pipeline image should be rebuilt. If there are code changes in the client folder, the web server image should be rebuilt. The data server image should rarely ever have to be rebuilt.

 Command to rebuild the data pipeline image:

 ```sh
 $ docker build ./data/data-pipeline -t 'j40_data_pipeline'
 ```

 Command to rebuild the data server image:

 ```sh
 $ docker build ./data/data-serve -t 'j40_score_server'
 ```

 Command to rebuild the web server image:

 ```sh
 $ docker build ./client -t 'j40_website'
 ```

 Depending on what code has changed, it may be necessary to clear the cache for the image rebuild. If the image rebuild fails, insert the `--no-cache` flag immediately after `build` (for example, `docker build --no-cache ./client -t 'j40_website'`).

-Once one or both images are rebuilt, you can re-run the docker compose command.
+Once the required images are rebuilt, you can re-run the docker compose command.
README.md: 41 changes

@@ -26,6 +26,47 @@ The intermediate steps of the data pipeline, the scores, and the final output th

 If you want to run the entire application locally, visit [QUICKSTART.md](QUICKSTART.md).

+### Updating Data Sources
+
+CEJST version 2.0 uses 2010 Census tracts as the primary unit of analysis and as the external key to link most datasets. Data published after 2020 will generally use 2020 Census tracts, so updating CEJST datasets to newer vintages will generally involve incorporating 2020 Census tracts.
+
+Option 1: Keep 2010 boundaries on the map
+- Makes sense if we are not updating the American Community Survey (source of tract info & demographics, and low income for states & PR)
+- Lower lift option to update a few individual datasets
+
+Option 2: Update to 2020 boundaries on the map
+- Makes sense if we are updating the American Community Survey (source of tract info & demographics, and low income for states & PR)
+- Higher lift, but will eventually need to happen
+
+In either case, we need to enable translation across Census tract vintages. The Census provides a simple relationship file.
+
+Crosswalk:
+https://www2.census.gov/geo/docs/maps-data/data/rel2020/tract/tab20_tract20_tract10_natl.txt
+
+Explanation of crosswalk:
+https://www2.census.gov/geo/pdfs/maps-data/data/rel2020/tract/explanation_tab20_tract20_tract10.pdf
+
+NB: Crosswalks for territories are stored in separate files.
+
+The average_tract_translate() function in [utils.py](data/data-pipeline/data_pipeline/utils.py) can be used to translate between 2010 and 2020 tract boundaries. For example, if we update to ACS data with 2020 boundaries, we will need to translate all data sources that are still using 2010 boundaries. To do this, average_tract_translate() takes each 2020 tract ID, finds all the 2010 tracts mapped to it, and then takes the mean of each column across these mapped 2010 tracts. Note that this function only works on numeric columns. The current set-up requires the crosswalk to be passed in as an argument; it may be easier to upload a static copy of the crosswalk and read it in at the beginning of the function.
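To make the translation step concrete, here is a minimal, hypothetical sketch of the crosswalk-averaging approach described above. It is not the actual average_tract_translate() implementation; the relationship-file column names (GEOID_TRACT_10, GEOID_TRACT_20) and the pipe delimiter are assumptions to verify against the Census layout explanation linked above.

```python
# Hypothetical sketch of the crosswalk-averaging translation described above.
# Column names and the delimiter of the Census relationship file are assumptions;
# check them against the layout explanation PDF before relying on this.
import pandas as pd


def translate_2010_to_2020(df_2010: pd.DataFrame, crosswalk: pd.DataFrame) -> pd.DataFrame:
    """Aggregate numeric 2010-tract columns up to 2020 tract IDs.

    df_2010 is indexed by 2010 tract GEOID. crosswalk has one row per
    (2020 tract, 2010 tract) pair, as in the national relationship file.
    """
    merged = crosswalk.merge(
        df_2010, left_on="GEOID_TRACT_10", right_index=True, how="left"
    )
    # For each 2020 tract, take the unweighted mean of every numeric column
    # across all 2010 tracts mapped to it (non-numeric columns are dropped).
    numeric_cols = merged.select_dtypes("number").columns
    return merged.groupby("GEOID_TRACT_20")[numeric_cols].mean()


# Read GEOIDs as strings so leading zeros are preserved; the tab20
# relationship files are assumed to be pipe-delimited.
crosswalk = pd.read_csv("tab20_tract20_tract10_natl.txt", sep="|", dtype=str)
```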
+Overview of how to update a source:
+1) If this is the first source being updated to 2020 geography, add a new bucket in AWS for 2020 data. Stage the new data sources in S3.
+2) If this is the first source being updated to 2020 geography, the GEOID variable will need to be split into two variables, one for 2010 and one for 2020. Naming and conventions will depend on whether we still want to use 2010 geographies in the map.
+3) Look at [DATASETS.md](DATASETS.md) to see specific update instructions for each data source, including URLs for updated data sources.
+4) Check to see that the columns we're using still exist in the new data source. If not, make a plan for methodology changes.
+5) Update paths in ETL files (a hypothetical example follows this list).
+6) Update the path in the ETL file's else statement where possible.
+7) Update GEOID variable definitions in ETL files.
+8) If the updated data source is using different tract boundaries from what we want to use on the map, call the function in utils.py at the end of the ETL file.
+9) Update [DATASETS.md](DATASETS.md) to reflect the new changes.
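The ETL-file updates in steps 5 through 7 usually amount to repointing the source reference and keeping the tract-ID vintage explicit. The sketch below is purely illustrative: the class name, attribute names, and file name are placeholders, not the actual data_pipeline classes or constants.

```python
# Purely illustrative: ExampleETL, SOURCE_URL, and the field-name attributes
# below are placeholders, not the real data_pipeline API.
class ExampleETL:
    def __init__(self):
        # Steps 5-6: point the extract step at the newly staged file
        # (the S3 copy and/or the originating download URL).
        self.SOURCE_URL = (
            "https://example-bucket.s3.amazonaws.com/sources/"
            "EJSCREEN_202X_USPR_Tracts.csv.zip"  # placeholder name from DATASETS.md
        )
        # Step 7: keep the 2010 and 2020 tract IDs as separate, clearly
        # named fields so downstream joins know which vintage they use.
        self.GEOID_TRACT_10_FIELD = "GEOID10_TRACT"
        self.GEOID_TRACT_20_FIELD = "GEOID20_TRACT"
```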
+In some cases, updated data isn't available yet:
+- CDC life expectancy at birth by state
+- First Street Foundation (acquired through email; can request again through email, or try to use the proprietary API)
+
+Legacy pollution data from the US Army Corps of Engineers uses geolocation to map their data to Census tracts. Points can be mapped to any geography, but we will need to update our mappings if we want to use 2020 tract boundaries in the map.
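For those point-based sources, a spatial join is one way to re-map the points onto 2020 tract polygons. This is a hedged sketch that assumes geopandas is available in the pipeline environment; the tract file path and the GEOID column name are placeholders.

```python
# Hypothetical sketch: assign point records (e.g. FUDS or AMLIS sites) to
# 2020 Census tracts with a spatial join. Paths and column names are placeholders.
import geopandas as gpd


def points_to_2020_tracts(points: gpd.GeoDataFrame, tracts_2020_path: str) -> gpd.GeoDataFrame:
    tracts = gpd.read_file(tracts_2020_path)  # 2020 tract polygons with a GEOID column
    points = points.to_crs(tracts.crs)        # match coordinate systems before joining
    # Each point picks up the GEOID of the 2020 tract that contains it.
    return gpd.sjoin(points, tracts[["GEOID", "geometry"]], how="left", predicate="within")
```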
 ### Advanced Guides

 If you have software experience or more specific use cases, in-depth documentation of how to work with this project can be found in [INSTALLATION.md](INSTALLATION.md).
@@ -6,8 +6,8 @@

 # Feature Tiles env variables:
 # The TILES_BASE_URL will be determined by the DATA_SOURCE env variable
-GATSBY_CDN_TILES_BASE_URL=https://static-data-screeningtool.geoplatform.gov
-GATSBY_LOCAL_TILES_BASE_URL=http://localhost:5000/data/data-pipeline
+GATSBY_CDN_TILES_BASE_URL=https://dig0wsohit6js.cloudfront.net
+GATSBY_LOCAL_TILES_BASE_URL=http://localhost:5080/data/data-pipeline

 GATSBY_DATA_PIPELINE_SCORE_PATH_LOCAL=data_pipeline/data/score
 GATSBY_DATA_PIPELINE_TRIBAL_PATH_LOCAL=data_pipeline/data/tribal
@@ -1,19 +1,20 @@
 FROM node:14

+ENV PORT=6000
+
 # Set working directory like 'cd' command, any subsequent instructions in this docker file, will start from
 # this working directory
 WORKDIR /client

+# install all packages as a layer in the docker image / container
+COPY package*.json ./
+RUN npm ci && npm cache clean --force
+
 # copy all local files from the working directory to the docker image/container however we must use
 # dockerignore to ignore node_modules so that the image can use what was just installed from the above
 # step.
 COPY . .

-# install all packages as a layer in the docker image / container
-RUN npm ci
-
-ENV PORT=6000
-
 EXPOSE 6000

-CMD [ "npm", "start"]
+CMD [ "npm", "start"]
@@ -30,11 +30,14 @@ When(`I click on the {string} button in the navigation`, (page) => {

 When(`I look for the {string} CTA`, (ctaString) => {
   cy.get(`[data-cy="${hyphenizeString(ctaString)}-block"]`).as('CTA_block');
-  cy.get('@CTA_block').scrollIntoView().should('be.visible');
+  cy.get('@CTA_block').scrollIntoView();
+  cy.get('@CTA_block').should('be.visible');
 });

 When(`I look for the {string}`, (footer) => {
-  cy.get(`[data-cy="${hyphenizeString(footer)}-primary-block"]`).scrollIntoView().should('be.visible');
+  cy.get(`[data-cy="${hyphenizeString(footer)}-primary-block"]`).as('string_block');
+  cy.get('@string_block').scrollIntoView();
+  cy.get('@string_block').should('be.visible');
 });

 // Common Thens:
client/package-lock.json (generated): 5 changes

@@ -4,11 +4,6 @@
   "lockfileVersion": 1,
   "requires": true,
   "dependencies": {
-    "-": {
-      "version": "0.0.1",
-      "resolved": "https://registry.npmjs.org/-/-/--0.0.1.tgz",
-      "integrity": "sha512-3HfneK3DGAm05fpyj20sT3apkNcvPpCuccOThOPdzz8sY7GgQGe0l93XH9bt+YzibcTIgUAIMoyVJI740RtgyQ=="
-    },
     "@adobe/css-tools": {
       "version": "4.2.0",
       "resolved": "https://registry.npmjs.org/@adobe/css-tools/-/css-tools-4.2.0.tgz",
@@ -12,9 +12,7 @@ import LinkTypeWrapper from '../LinkTypeWrapper';
 import SurveyButton from '../SurveyButton';

-// @ts-ignore
-import {GITHUB_LINK, GITHUB_LINK_ES} from '../../constants';
 import {PAGES_ENDPOINTS} from '../../data/constants';
 import * as ABOUT_COPY from '../../data/copy/about';
 import * as COMMON_COPY from '../../data/copy/common';
 import whitehouseIcon from '../../images/eop-seal.svg';

@@ -64,19 +62,6 @@ const J40Footer = () => {
       dataCy={hyphenizeString(COMMON_COPY.FOOTER.FIND_CONTACT.defaultMessage)}
     />,
   ],
-  [
-    intl.formatMessage(COMMON_COPY.FOOTER.CONTRIBUTE),
-    <LinkTypeWrapper
-      linkText={intl.formatMessage(COMMON_COPY.FOOTER.GITHUB_LINK_TEXT)}
-      internal={false}
-      url={intl.locale === 'es' ? GITHUB_LINK_ES : GITHUB_LINK}
-      openUrlNewTab={true}
-      className={'footer-link-first-child'}
-      key={'contactlink'}
-      dataCy={hyphenizeString(COMMON_COPY.FOOTER.GITHUB_LINK_TEXT.defaultMessage)}
-      tag={intl.formatMessage(ABOUT_COPY.GET_INVOLVED.JOIN_OSC_LINK_TAG)}
-    />,
-  ],
   ];

 // see https://designsystem.digital.gov/components/footer/
@@ -114,41 +114,6 @@ exports[`J40Footer renders correctly 1`] = `
         </ul>
       </section>
     </div>
-    <div
-      class="mobile-lg:grid-col-12 desktop:grid-col-4"
-    >
-      <section>
-        <div
-          class="j40-h4"
-        >
-          Want to contribute?
-        </div>
-        <ul
-          class="usa-list usa-list--unstyled"
-        >
-          <li
-            class="usa-footer__secondary-link"
-          >
-            <a
-              class="usa-link usa-link--external footer-link-first-child"
-              data-cy="check-out-the-code-on-github"
-              href="https://github.com/DOI-DO/ceq-j40-cejst-2"
-              rel="noreferrer"
-              target="_blank"
-            >
-              Check out the code on GitHub
-            </a>
-
-            <span
-              class="usa-tag "
-              data-testid="tag"
-            >
-              New Location
-            </span>
-          </li>
-        </ul>
-      </section>
-    </div>
   </div>
 </div>
</div>
@@ -161,24 +161,11 @@ exports[`rendering of the J40Header checks if component renders 1`] = `
     </div>
   </div>
   <div>
-    <div
-      class="usa-alert usa-alert--info"
-      data-testid="alert"
-    >
-      <div
-        class="usa-alert__body"
-      >
-        <h1
-          class="usa-alert__heading"
-        >
-          Version 2.0 of the tool is now available
-        </h1>
-        <p
-          class="usa-alert__text"
-        >
-          The Council on Environmental Quality (CEQ) made the 2.0 version of the tool available on Dec 20, 2024.
-        </p>
-      </div>
+    <div>
+      <strong>
+        This tool has been updated.
+      </strong>
+      The 2.0 version of the tool was released on Dec 20, 2024.
     </div>
   </div>
   <div
@@ -11,7 +11,6 @@ import J40MainGridContainer from '../components/J40MainGridContainer';
 import Layout from '../components/layout';
 import SubPageNav from '../components/SubPageNav';

-import {GITHUB_LINK, GITHUB_LINK_ES} from '../constants';
 import {DATA_SURVEY_LINKS, PAGES_ENDPOINTS, USWDS_BREAKPOINTS} from '../data/constants';
 import * as ABOUT_COPY from '../data/copy/about';
 import {FEEDBACK_EMAIL} from '../data/copy/common';

@@ -20,8 +19,6 @@ import {FEEDBACK_EMAIL} from '../data/copy/common';
 import commentIcon from // @ts-ignore
   '/node_modules/uswds/dist/img/usa-icons/comment.svg';

-import githubIcon from // @ts-ignore
-  '/node_modules/uswds/dist/img/usa-icons/github.svg';

 interface IAboutPageProps {
   location: Location;

@@ -113,19 +110,9 @@ const AboutPage = ({location}: IAboutPageProps) => {
         </p>
       </AboutCard>

-      <AboutCard
-        size={'small'}
-        imgSrc={githubIcon}
-        header={intl.formatMessage(ABOUT_COPY.GET_INVOLVED.JOIN_OSC_HEADING)}
-        linkText={intl.formatMessage(ABOUT_COPY.GET_INVOLVED.JOIN_OSC_LINK_TEXT)}
-        linkTag={intl.formatMessage(ABOUT_COPY.GET_INVOLVED.JOIN_OSC_LINK_TAG)}
-        url={intl.locale === 'es' ? GITHUB_LINK_ES : GITHUB_LINK}
-        openUrlNewTab={true}
-        internal={false}>
-        <p>
-          {intl.formatMessage(ABOUT_COPY.GET_INVOLVED.JOIN_OSC_INFO)}
-        </p>
-      </AboutCard>
+      <div className='grid-gap-lg tablet:grid-col'>
+        {/* spacer */}
+      </div>
     </AboutCardsContainer>

     <Grid col={12} tablet={{col: 8}}>
@@ -161,24 +161,11 @@ exports[`rendering of the DatasetContainer checks if various text fields are vis
     </div>
   </div>
   <div>
-    <div
-      class="usa-alert usa-alert--info"
-      data-testid="alert"
-    >
-      <div
-        class="usa-alert__body"
-      >
-        <h1
-          class="usa-alert__heading"
-        >
-          Version 2.0 of the tool is now available
-        </h1>
-        <p
-          class="usa-alert__text"
-        >
-          The Council on Environmental Quality (CEQ) made the 2.0 version of the tool available on Dec 20, 2024.
-        </p>
-      </div>
+    <div>
+      <strong>
+        This tool has been updated.
+      </strong>
+      The 2.0 version of the tool was released on Dec 20, 2024.
     </div>
   </div>
   <div

@@ -571,69 +558,7 @@
      </div>
      <div
        class="grid-gap-lg tablet:grid-col"
        data-testid="grid"
      >
        <div
          class="grid-row j40-aboutcard-sm-card"
          data-testid="grid"
        >
          <div
            class="tablet:grid-col-2 j40-aboutpage-image-container"
            data-testid="grid"
          >
            <img
              alt="Join the open source community"
              class="j40-aboutcard-image"
              src="test-file-stub"
            />
          </div>
          <div
            class="tablet:grid-col-9"
            data-cy="join-the-open-source-community-block"
            data-testid="grid"
          >
            <div
              class="grid-row"
              data-testid="grid"
            >
              <h3>
                Join the open source community
              </h3>
              <p>

                The tool’s code is open source, which means it is available for the public to view and contribute to it.

              </p>
              <div
                class="j40-aboutcard-sm-link"
              >
                <a
                  class="usa-link usa-link--external"
                  data-cy=""
                  href="https://github.com/DOI-DO/ceq-j40-cejst-2"
                  rel="noreferrer"
                  target="_blank"
                >
                  Check it out on GitHub
                </a>

                <span
                  class="usa-tag "
                  data-testid="tag"
                >
                  New Location
                </span>
              </div>
            </div>
          </div>
          <div
            class="grid-col-1"
            data-testid="grid"
          >

          </div>
        </div>
      </div>
      />
    </div>
    <div
      class="grid-col-12 tablet:grid-col-8"

@@ -797,41 +722,6 @@
         </ul>
       </section>
     </div>
-    <div
-      class="mobile-lg:grid-col-12 desktop:grid-col-4"
-    >
-      <section>
-        <div
-          class="j40-h4"
-        >
-          Want to contribute?
-        </div>
-        <ul
-          class="usa-list usa-list--unstyled"
-        >
-          <li
-            class="usa-footer__secondary-link"
-          >
-            <a
-              class="usa-link usa-link--external footer-link-first-child"
-              data-cy="check-out-the-code-on-github"
-              href="https://github.com/DOI-DO/ceq-j40-cejst-2"
-              rel="noreferrer"
-              target="_blank"
-            >
-              Check out the code on GitHub
-            </a>
-
-            <span
-              class="usa-tag "
-              data-testid="tag"
-            >
-              New Location
-            </span>
-          </li>
-        </ul>
-      </section>
-    </div>
   </div>
 </div>
</div>
@ -161,24 +161,11 @@ exports[`rendering of the DatasetContainer checks if various text fields are vis
|
|||
</div>
|
||||
</div>
|
||||
<div>
|
||||
<div
|
||||
class="usa-alert usa-alert--info"
|
||||
data-testid="alert"
|
||||
>
|
||||
<div
|
||||
class="usa-alert__body"
|
||||
>
|
||||
<h1
|
||||
class="usa-alert__heading"
|
||||
>
|
||||
Version 2.0 of the tool is now available
|
||||
</h1>
|
||||
<p
|
||||
class="usa-alert__text"
|
||||
>
|
||||
The Council on Environmental Quality (CEQ) made the 2.0 version of the tool available on Dec 20, 2024.
|
||||
</p>
|
||||
</div>
|
||||
<div>
|
||||
<strong>
|
||||
This tool has been updated.
|
||||
</strong>
|
||||
The 2.0 version of the tool was released on Dec 20, 2024.
|
||||
</div>
|
||||
</div>
|
||||
<div
|
||||
|
@ -582,41 +569,6 @@ exports[`rendering of the DatasetContainer checks if various text fields are vis
|
|||
</ul>
|
||||
</section>
|
||||
</div>
|
||||
<div
|
||||
class="mobile-lg:grid-col-12 desktop:grid-col-4"
|
||||
>
|
||||
<section>
|
||||
<div
|
||||
class="j40-h4"
|
||||
>
|
||||
Want to contribute?
|
||||
</div>
|
||||
<ul
|
||||
class="usa-list usa-list--unstyled"
|
||||
>
|
||||
<li
|
||||
class="usa-footer__secondary-link"
|
||||
>
|
||||
<a
|
||||
class="usa-link usa-link--external footer-link-first-child"
|
||||
data-cy="check-out-the-code-on-github"
|
||||
href="https://github.com/DOI-DO/ceq-j40-cejst-2"
|
||||
rel="noreferrer"
|
||||
target="_blank"
|
||||
>
|
||||
Check out the code on GitHub
|
||||
</a>
|
||||
|
||||
<span
|
||||
class="usa-tag "
|
||||
data-testid="tag"
|
||||
>
|
||||
New Location
|
||||
</span>
|
||||
</li>
|
||||
</ul>
|
||||
</section>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
|
|
@ -161,24 +161,11 @@ exports[`rendering of the DatasetContainer checks if various text fields are vis
|
|||
</div>
|
||||
</div>
|
||||
<div>
|
||||
<div
|
||||
class="usa-alert usa-alert--info"
|
||||
data-testid="alert"
|
||||
>
|
||||
<div
|
||||
class="usa-alert__body"
|
||||
>
|
||||
<h1
|
||||
class="usa-alert__heading"
|
||||
>
|
||||
Version 2.0 of the tool is now available
|
||||
</h1>
|
||||
<p
|
||||
class="usa-alert__text"
|
||||
>
|
||||
The Council on Environmental Quality (CEQ) made the 2.0 version of the tool available on Dec 20, 2024.
|
||||
</p>
|
||||
</div>
|
||||
<div>
|
||||
<strong>
|
||||
This tool has been updated.
|
||||
</strong>
|
||||
The 2.0 version of the tool was released on Dec 20, 2024.
|
||||
</div>
|
||||
</div>
|
||||
<div
|
||||
|
@ -714,41 +701,6 @@ exports[`rendering of the DatasetContainer checks if various text fields are vis
|
|||
</ul>
|
||||
</section>
|
||||
</div>
|
||||
<div
|
||||
class="mobile-lg:grid-col-12 desktop:grid-col-4"
|
||||
>
|
||||
<section>
|
||||
<div
|
||||
class="j40-h4"
|
||||
>
|
||||
Want to contribute?
|
||||
</div>
|
||||
<ul
|
||||
class="usa-list usa-list--unstyled"
|
||||
>
|
||||
<li
|
||||
class="usa-footer__secondary-link"
|
||||
>
|
||||
<a
|
||||
class="usa-link usa-link--external footer-link-first-child"
|
||||
data-cy="check-out-the-code-on-github"
|
||||
href="https://github.com/DOI-DO/ceq-j40-cejst-2"
|
||||
rel="noreferrer"
|
||||
target="_blank"
|
||||
>
|
||||
Check out the code on GitHub
|
||||
</a>
|
||||
|
||||
<span
|
||||
class="usa-tag "
|
||||
data-testid="tag"
|
||||
>
|
||||
New Location
|
||||
</span>
|
||||
</li>
|
||||
</ul>
|
||||
</section>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
|
|
@ -161,24 +161,11 @@ exports[`rendering of the DatasetContainer checks if various text fields are vis
|
|||
</div>
|
||||
</div>
|
||||
<div>
|
||||
<div
|
||||
class="usa-alert usa-alert--info"
|
||||
data-testid="alert"
|
||||
>
|
||||
<div
|
||||
class="usa-alert__body"
|
||||
>
|
||||
<h1
|
||||
class="usa-alert__heading"
|
||||
>
|
||||
Version 2.0 of the tool is now available
|
||||
</h1>
|
||||
<p
|
||||
class="usa-alert__text"
|
||||
>
|
||||
The Council on Environmental Quality (CEQ) made the 2.0 version of the tool available on Dec 20, 2024.
|
||||
</p>
|
||||
</div>
|
||||
<div>
|
||||
<strong>
|
||||
This tool has been updated.
|
||||
</strong>
|
||||
The 2.0 version of the tool was released on Dec 20, 2024.
|
||||
</div>
|
||||
</div>
|
||||
<div
|
||||
|
@ -1244,41 +1231,6 @@ exports[`rendering of the DatasetContainer checks if various text fields are vis
|
|||
</ul>
|
||||
</section>
|
||||
</div>
|
||||
<div
|
||||
class="mobile-lg:grid-col-12 desktop:grid-col-4"
|
||||
>
|
||||
<section>
|
||||
<div
|
||||
class="j40-h4"
|
||||
>
|
||||
Want to contribute?
|
||||
</div>
|
||||
<ul
|
||||
class="usa-list usa-list--unstyled"
|
||||
>
|
||||
<li
|
||||
class="usa-footer__secondary-link"
|
||||
>
|
||||
<a
|
||||
class="usa-link usa-link--external footer-link-first-child"
|
||||
data-cy="check-out-the-code-on-github"
|
||||
href="https://github.com/DOI-DO/ceq-j40-cejst-2"
|
||||
rel="noreferrer"
|
||||
target="_blank"
|
||||
>
|
||||
Check out the code on GitHub
|
||||
</a>
|
||||
|
||||
<span
|
||||
class="usa-tag "
|
||||
data-testid="tag"
|
||||
>
|
||||
New Location
|
||||
</span>
|
||||
</li>
|
||||
</ul>
|
||||
</section>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
|
|
@ -161,24 +161,11 @@ exports[`rendering of the DatasetContainer checks if various text fields are vis
|
|||
</div>
|
||||
</div>
|
||||
<div>
|
||||
<div
|
||||
class="usa-alert usa-alert--info"
|
||||
data-testid="alert"
|
||||
>
|
||||
<div
|
||||
class="usa-alert__body"
|
||||
>
|
||||
<h1
|
||||
class="usa-alert__heading"
|
||||
>
|
||||
Version 2.0 of the tool is now available
|
||||
</h1>
|
||||
<p
|
||||
class="usa-alert__text"
|
||||
>
|
||||
The Council on Environmental Quality (CEQ) made the 2.0 version of the tool available on Dec 20, 2024.
|
||||
</p>
|
||||
</div>
|
||||
<div>
|
||||
<strong>
|
||||
This tool has been updated.
|
||||
</strong>
|
||||
The 2.0 version of the tool was released on Dec 20, 2024.
|
||||
</div>
|
||||
</div>
|
||||
<div
|
||||
|
@ -3271,41 +3258,6 @@ exports[`rendering of the DatasetContainer checks if various text fields are vis
|
|||
</ul>
|
||||
</section>
|
||||
</div>
|
||||
<div
|
||||
class="mobile-lg:grid-col-12 desktop:grid-col-4"
|
||||
>
|
||||
<section>
|
||||
<div
|
||||
class="j40-h4"
|
||||
>
|
||||
Want to contribute?
|
||||
</div>
|
||||
<ul
|
||||
class="usa-list usa-list--unstyled"
|
||||
>
|
||||
<li
|
||||
class="usa-footer__secondary-link"
|
||||
>
|
||||
<a
|
||||
class="usa-link usa-link--external footer-link-first-child"
|
||||
data-cy="check-out-the-code-on-github"
|
||||
href="https://github.com/DOI-DO/ceq-j40-cejst-2"
|
||||
rel="noreferrer"
|
||||
target="_blank"
|
||||
>
|
||||
Check out the code on GitHub
|
||||
</a>
|
||||
|
||||
<span
|
||||
class="usa-tag "
|
||||
data-testid="tag"
|
||||
>
|
||||
New Location
|
||||
</span>
|
||||
</li>
|
||||
</ul>
|
||||
</section>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
|
|
@ -161,24 +161,11 @@ exports[`rendering of the Privacy Policy page matches Privacy Policy page snapsh
|
|||
</div>
|
||||
</div>
|
||||
<div>
|
||||
<div
|
||||
class="usa-alert usa-alert--info"
|
||||
data-testid="alert"
|
||||
>
|
||||
<div
|
||||
class="usa-alert__body"
|
||||
>
|
||||
<h1
|
||||
class="usa-alert__heading"
|
||||
>
|
||||
Version 2.0 of the tool is now available
|
||||
</h1>
|
||||
<p
|
||||
class="usa-alert__text"
|
||||
>
|
||||
The Council on Environmental Quality (CEQ) made the 2.0 version of the tool available on Dec 20, 2024.
|
||||
</p>
|
||||
</div>
|
||||
<div>
|
||||
<strong>
|
||||
This tool has been updated.
|
||||
</strong>
|
||||
The 2.0 version of the tool was released on Dec 20, 2024.
|
||||
</div>
|
||||
</div>
|
||||
<div
|
||||
|
@ -706,41 +693,6 @@ exports[`rendering of the Privacy Policy page matches Privacy Policy page snapsh
|
|||
</ul>
|
||||
</section>
|
||||
</div>
|
||||
<div
|
||||
class="mobile-lg:grid-col-12 desktop:grid-col-4"
|
||||
>
|
||||
<section>
|
||||
<div
|
||||
class="j40-h4"
|
||||
>
|
||||
Want to contribute?
|
||||
</div>
|
||||
<ul
|
||||
class="usa-list usa-list--unstyled"
|
||||
>
|
||||
<li
|
||||
class="usa-footer__secondary-link"
|
||||
>
|
||||
<a
|
||||
class="usa-link usa-link--external footer-link-first-child"
|
||||
data-cy="check-out-the-code-on-github"
|
||||
href="https://github.com/DOI-DO/ceq-j40-cejst-2"
|
||||
rel="noreferrer"
|
||||
target="_blank"
|
||||
>
|
||||
Check out the code on GitHub
|
||||
</a>
|
||||
|
||||
<span
|
||||
class="usa-tag "
|
||||
data-testid="tag"
|
||||
>
|
||||
New Location
|
||||
</span>
|
||||
</li>
|
||||
</ul>
|
||||
</section>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
|
|
@ -161,24 +161,11 @@ exports[`rendering of the DatasetContainer checks if various text fields are vis
|
|||
</div>
|
||||
</div>
|
||||
<div>
|
||||
<div
|
||||
class="usa-alert usa-alert--info"
|
||||
data-testid="alert"
|
||||
>
|
||||
<div
|
||||
class="usa-alert__body"
|
||||
>
|
||||
<h1
|
||||
class="usa-alert__heading"
|
||||
>
|
||||
Version 2.0 of the tool is now available
|
||||
</h1>
|
||||
<p
|
||||
class="usa-alert__text"
|
||||
>
|
||||
The Council on Environmental Quality (CEQ) made the 2.0 version of the tool available on Dec 20, 2024.
|
||||
</p>
|
||||
</div>
|
||||
<div>
|
||||
<strong>
|
||||
This tool has been updated.
|
||||
</strong>
|
||||
The 2.0 version of the tool was released on Dec 20, 2024.
|
||||
</div>
|
||||
</div>
|
||||
<div
|
||||
|
@ -522,41 +509,6 @@ exports[`rendering of the DatasetContainer checks if various text fields are vis
|
|||
</ul>
|
||||
</section>
|
||||
</div>
|
||||
<div
|
||||
class="mobile-lg:grid-col-12 desktop:grid-col-4"
|
||||
>
|
||||
<section>
|
||||
<div
|
||||
class="j40-h4"
|
||||
>
|
||||
Want to contribute?
|
||||
</div>
|
||||
<ul
|
||||
class="usa-list usa-list--unstyled"
|
||||
>
|
||||
<li
|
||||
class="usa-footer__secondary-link"
|
||||
>
|
||||
<a
|
||||
class="usa-link usa-link--external footer-link-first-child"
|
||||
data-cy="check-out-the-code-on-github"
|
||||
href="https://github.com/DOI-DO/ceq-j40-cejst-2"
|
||||
rel="noreferrer"
|
||||
target="_blank"
|
||||
>
|
||||
Check out the code on GitHub
|
||||
</a>
|
||||
|
||||
<span
|
||||
class="usa-tag "
|
||||
data-testid="tag"
|
||||
>
|
||||
New Location
|
||||
</span>
|
||||
</li>
|
||||
</ul>
|
||||
</section>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
|
|
@ -1,57 +1,49 @@
|
|||
FROM ubuntu:22.04
|
||||
|
||||
ENV TZ=America/Los_Angeles
|
||||
FROM ubuntu:24.04
|
||||
|
||||
# Install packages
|
||||
RUN apt-get update && TZ=America/Los_Angeles DEBIAN_FRONTEND=noninteractive apt-get install -y \
|
||||
build-essential \
|
||||
make \
|
||||
gcc \
|
||||
git \
|
||||
unzip \
|
||||
wget \
|
||||
RUN apt-get update -y && \
|
||||
apt-get upgrade -y && \
|
||||
apt-get install -y \
|
||||
software-properties-common \
|
||||
libsqlite3-dev \
|
||||
zlib1g-dev
|
||||
tippecanoe \
|
||||
libgdal-dev
|
||||
|
||||
# Install python3.10 and dependencies
|
||||
RUN add-apt-repository ppa:deadsnakes/ppa -y && \
|
||||
apt-get update -y && \
|
||||
apt remove python3-cryptography -y && \
|
||||
apt install -y python3.10-dev && \
|
||||
apt install -y python3-pip && \
|
||||
update-alternatives --install /usr/bin/python3 python3 /usr/bin/python3.10 1 && \
|
||||
update-alternatives --config python3 && \
|
||||
pip install cryptography && \
|
||||
pip install cffi && \
|
||||
pip install openpyxl
|
||||
|
||||
# tippecanoe
|
||||
RUN apt-get update
|
||||
RUN ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone
|
||||
RUN apt-add-repository -y ppa:git-core/ppa
|
||||
RUN mkdir -p /tmp/tippecanoe-src && git clone https://github.com/mapbox/tippecanoe.git /tmp/tippecanoe-src
|
||||
WORKDIR /tmp/tippecanoe-src
|
||||
RUN /bin/sh -c make && make install
|
||||
|
||||
## gdal
|
||||
RUN add-apt-repository ppa:ubuntugis/ppa
|
||||
RUN apt-get -y install gdal-bin libgdal-dev
|
||||
|
||||
# Install python3.10
|
||||
RUN add-apt-repository ppa:deadsnakes/ppa
|
||||
RUN apt install -y python3.10-dev
|
||||
RUN apt install -y python3-pip
|
||||
RUN update-alternatives --install /usr/bin/python3 python3 /usr/bin/python3.10 1
|
||||
RUN update-alternatives --config python3
|
||||
|
||||
# Copy all project files into the container
|
||||
COPY . /data-pipeline
|
||||
# Set the working directory
|
||||
WORKDIR /data-pipeline
|
||||
|
||||
# Python package installation using poetry. See:
|
||||
# https://stackoverflow.com/questions/53835198/integrating-python-poetry-with-docker
|
||||
# Set environment variables
|
||||
ENV PYTHONFAULTHANDLER=1 \
|
||||
PYTHONUNBUFFERED=1 \
|
||||
PYTHONHASHSEED=random \
|
||||
PIP_NO_CACHE_DIR=off \
|
||||
PIP_DISABLE_PIP_VERSION_CHECK=on \
|
||||
PYTHONHASHSEED=0 \
|
||||
PIP_NO_CACHE_DIR=on \
|
||||
PIP_DISABLE_PIP_VERSION_CHECK=1 \
|
||||
PIP_DEFAULT_TIMEOUT=100 \
|
||||
POETRY_VERSION=1.8.4
|
||||
POETRY_VERSION=1.8.4
|
||||
|
||||
RUN pip install "poetry==$POETRY_VERSION"
|
||||
RUN poetry config virtualenvs.create false \
|
||||
&& poetry config virtualenvs.in-project false \
|
||||
&& poetry install --only main --no-interaction --no-ansi
|
||||
RUN pip install openpyxl
|
||||
# Install poetry
|
||||
RUN apt-get update -y && \
|
||||
pip install "poetry==$POETRY_VERSION"
|
||||
|
||||
# Copy configs and install packages
|
||||
COPY pyproject.toml poetry.lock ./
|
||||
RUN poetry config virtualenvs.create false && \
|
||||
poetry config virtualenvs.in-project false && \
|
||||
poetry install --only main --no-interaction --no-ansi
|
||||
|
||||
# Copy all project files into the container
|
||||
COPY . .
|
||||
|
||||
# Default behavior is to output the options for the base application. This prevents the entire pipeline from running unintentionally.
|
||||
ENV PIPELINE_CMD="data_pipeline.application --help"
|
||||
|
|
|
@ -34,12 +34,12 @@ DATA_SCORE_CSV_DIR = DATA_SCORE_DIR / "csv"
 DATA_SCORE_CSV_FULL_DIR = DATA_SCORE_CSV_DIR / "full"
 DATA_SCORE_CSV_FULL_FILE_PATH = DATA_SCORE_CSV_FULL_DIR / "usa_score.parquet"
 FULL_SCORE_CSV_FULL_PLUS_COUNTIES_FILE_PATH = (
-    DATA_SCORE_CSV_FULL_DIR / "usa_counties.csv"
+    DATA_SCORE_CSV_FULL_DIR / "usa_counties.parquet"
 )

 # Score Tile CSV source path
 DATA_SCORE_CSV_TILES_PATH = DATA_SCORE_CSV_DIR / "tiles"
-DATA_SCORE_CSV_TILES_FILE_PATH = DATA_SCORE_CSV_TILES_PATH / "usa.csv"
+DATA_SCORE_CSV_TILES_FILE_PATH = DATA_SCORE_CSV_TILES_PATH / "usa.parquet"
 DATA_SCORE_JSON_INDEX_FILE_PATH = (
     DATA_SCORE_CSV_TILES_PATH / "tile_indexes.json"
 )
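The score outputs referenced here are now parquet files. A minimal sketch, not part of the PR, of reading them back once the pipeline has produced them; it assumes pandas with a parquet engine such as pyarrow installed:

# Sketch only: pick up the renamed outputs from the path constants above.
import pandas as pd
from data_pipeline.etl.score import constants

counties_df = pd.read_parquet(constants.FULL_SCORE_CSV_FULL_PLUS_COUNTIES_FILE_PATH)
tiles_df = pd.read_parquet(constants.DATA_SCORE_CSV_TILES_FILE_PATH)
print(counties_df.shape, tiles_df.shape)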
@ -35,7 +35,6 @@ class GeoScoreETL(ExtractTransformLoad):
         self.SCORE_SHP_FILE = self.SCORE_SHP_PATH / "usa.shp"

         self.SCORE_CSV_PATH = self.DATA_PATH / "score" / "csv"
-        self.TILE_SCORE_CSV = self.SCORE_CSV_PATH / "tiles" / "usa.csv"

         self.CENSUS_USA_GEOJSON = constants.DATA_CENSUS_GEOJSON_FILE_PATH

@ -100,13 +99,9 @@ class GeoScoreETL(ExtractTransformLoad):
             full_geojson_usa_df[self.LAND_FIELD_NAME] > 0
         ]

-        logger.info("Reading tile score CSV")
-        self.score_usa_df = pd.read_csv(
-            self.TILE_SCORE_CSV,
-            dtype={
-                self.TRACT_SHORT_FIELD: str,
-            },
-            low_memory=False,
+        logger.info("Reading tile score")
+        self.score_usa_df = pd.read_parquet(
+            constants.DATA_SCORE_CSV_TILES_FILE_PATH,
         )

     def transform(self) -> None:
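The explicit dtype override that kept tract IDs as strings is unnecessary with parquet, which stores column types alongside the data. A self-contained toy illustration (file and column names are examples only):

# Why dtype={...: str} can be dropped once the tile score is stored as parquet.
import pandas as pd

df = pd.DataFrame({"GTF": ["01001020100", "06075010500"], "SF": ["Alabama", "California"]})

df.to_csv("toy.csv", index=False)
print(pd.read_csv("toy.csv")["GTF"].dtype)          # int64 -- leading zero lost

df.to_parquet("toy.parquet", index=False)
print(pd.read_parquet("toy.parquet")["GTF"].dtype)  # object (str) -- preserved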
@ -442,15 +442,14 @@ class PostScoreETL(ExtractTransformLoad):
             self.input_census_geo_df
         )

-    def _load_score_csv_full(
-        self, score_county_state_merged: pd.DataFrame, score_csv_path: Path
+    def _load_score_full(
+        self, score_county_state_merged: pd.DataFrame, score_path: Path
     ) -> None:
         logger.debug("Saving Full Score CSV with County Information")
-        score_csv_path.parent.mkdir(parents=True, exist_ok=True)
-        score_county_state_merged.to_csv(
-            score_csv_path,
+        score_path.parent.mkdir(parents=True, exist_ok=True)
+        score_county_state_merged.to_parquet(
+            score_path,
             index=False,
-            encoding="utf-8-sig", # windows compat https://stackoverflow.com/a/43684587
         )

     def _load_excel_from_df(

@ -514,12 +513,12 @@ class PostScoreETL(ExtractTransformLoad):

         return excel_csv_config

-    def _load_tile_csv(
+    def _load_tile_score(
         self, score_tiles_df: pd.DataFrame, tile_score_path: Path
     ) -> None:
-        logger.debug("Saving Tile Score CSV")
+        logger.debug("Saving Tile Score")
         tile_score_path.parent.mkdir(parents=True, exist_ok=True)
-        score_tiles_df.to_csv(tile_score_path, index=False, encoding="utf-8")
+        score_tiles_df.to_parquet(tile_score_path, index=False)

     def _load_downloadable_zip(self, downloadable_info_path: Path) -> None:
         downloadable_info_path.mkdir(parents=True, exist_ok=True)

@ -631,11 +630,11 @@ class PostScoreETL(ExtractTransformLoad):
         self.output_tract_search_df.to_json(output_path, orient="records")

     def load(self) -> None:
-        self._load_score_csv_full(
+        self._load_score_full(
             self.output_score_county_state_merged_df,
             constants.FULL_SCORE_CSV_FULL_PLUS_COUNTIES_FILE_PATH,
         )
-        self._load_tile_csv(
+        self._load_tile_score(
             self.output_score_tiles_df, constants.DATA_SCORE_CSV_TILES_FILE_PATH
         )
         self._load_search_tract_data(constants.SCORE_TRACT_SEARCH_FILE_PATH)
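The two writers above now emit parquet. A minimal standalone sketch of that switch (not the ETL class itself), assuming a parquet engine such as pyarrow is available; note that DataFrame.to_parquet accepts index but has no encoding argument, so the utf-8-sig Windows workaround only applies on the CSV branch:

# Sketch of the CSV -> parquet switch in isolation.
from pathlib import Path
import pandas as pd

def save_frame(df: pd.DataFrame, path: Path) -> None:
    path.parent.mkdir(parents=True, exist_ok=True)
    if path.suffix == ".csv":
        # CSV needs an explicit encoding for Windows/Excel compatibility.
        df.to_csv(path, index=False, encoding="utf-8-sig")
    else:
        # Parquet is binary and self-describing; there is no encoding parameter.
        df.to_parquet(path, index=False)

save_frame(pd.DataFrame({"GTF": ["01001020100"]}), Path("out/usa.parquet"))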
@ -43,17 +43,17 @@ def check_score_data_source(
         settings.AWS_JUSTICE40_DATAPIPELINE_URL
         + "/data/score/csv/tiles/usa.csv"
     )
-    TILE_SCORE_CSV = score_csv_data_path / "tiles" / "usa.csv"
+    TILE_SCORE_FILE = constants.DATA_SCORE_CSV_TILES_FILE_PATH

    # download from s3 if census_data_source is aws
    if score_data_source == "aws":
        logger.debug("Fetching Score Tile data from AWS S3")
        Downloader.download_file_from_url(
-            file_url=TILE_SCORE_CSV_S3_URL, download_file_name=TILE_SCORE_CSV
+            file_url=TILE_SCORE_CSV_S3_URL, download_file_name=TILE_SCORE_FILE
        )
    else:
        # check if score data is found locally
-        if not os.path.isfile(TILE_SCORE_CSV):
+        if not os.path.isfile(TILE_SCORE_FILE):
            logger.warning(
                "No local score tiles data found. Please use '-s aws' to fetch from AWS"
            )
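A stripped-down sketch of the local-first check above, using only the standard library; the URL is a placeholder and the real pipeline builds it from settings and uses its own Downloader helper:

# Sketch: use the local tile score if present, otherwise fetch it on request.
import os
import urllib.request

TILE_SCORE_FILE = "data/score/csv/tiles/usa.parquet"
TILE_SCORE_S3_URL = "https://example.com/data/score/csv/tiles/usa.csv"  # placeholder

def ensure_tile_score(source: str = "local") -> None:
    if source == "aws":
        os.makedirs(os.path.dirname(TILE_SCORE_FILE), exist_ok=True)
        urllib.request.urlretrieve(TILE_SCORE_S3_URL, TILE_SCORE_FILE)
    elif not os.path.isfile(TILE_SCORE_FILE):
        raise FileNotFoundError("No local score tiles data found; use '-s aws' to fetch it.")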
@ -110,9 +110,9 @@ def test_create_downloadable_data(
     )


-def test_load_score_csv_full(etl, score_data_expected):
+def test_load_score_full(etl, score_data_expected):
     reload(constants)
-    etl._load_score_csv_full(
+    etl._load_score_full(
         score_data_expected,
         constants.FULL_SCORE_CSV_FULL_PLUS_COUNTIES_FILE_PATH,
     )

@ -121,7 +121,7 @@ def test_load_score_csv_full(etl, score_data_expected):

 def test_load_tile_csv(etl, tile_data_expected):
     reload(constants)
-    etl._load_score_csv_full(
+    etl._load_score_full(
         tile_data_expected, constants.DATA_SCORE_CSV_TILES_FILE_PATH
     )
     assert constants.DATA_SCORE_CSV_TILES_FILE_PATH.is_file()
@ -970,9 +970,8 @@ class CensusACSETL(ExtractTransformLoad):
                 # Then the imputed field should have no nulls
                 self.ADJUSTED_AND_IMPUTED_POVERTY_LESS_THAN_200_PERCENT_FPL_FIELD_NAME
             ]
-            .isna()
-            .sum()
-            == 0
+            .notna()
+            .all()
         ), "Error: not all values were filled..."

         logger.debug("Renaming columns...")
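The rewritten assertion reads more directly than counting nulls, and the two forms are equivalent; a quick toy check:

# Equivalence of the old and new assertion styles on a toy Series.
import numpy as np
import pandas as pd

s = pd.Series([1.0, 2.5, 3.0])
assert (s.isna().sum() == 0) == s.notna().all()

s_with_gap = pd.Series([1.0, np.nan])
assert (s_with_gap.isna().sum() == 0) == s_with_gap.notna().all()  # both sides are False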
@ -84,6 +84,6 @@ def add_tracts_for_geometries(
         df,
         tract_data[["GEOID10_TRACT", "geometry"]],
         how="inner",
-        op="intersects",
+        predicate="intersects",
     )
     return df
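geopandas renamed sjoin's op keyword to predicate (deprecated in 0.10 and removed in later releases, consistent with the lockfile's move to geopandas 1.x and shapely 2.x further down this diff). A minimal self-contained spatial join with made-up geometries:

# Sketch of a point-in-tract spatial join using the current keyword.
import geopandas as gpd
from shapely.geometry import Point, box

tracts = gpd.GeoDataFrame(
    {"GEOID10_TRACT": ["01001020100"]},  # made-up tract ID
    geometry=[box(0, 0, 1, 1)],
    crs="EPSG:4326",
)
points = gpd.GeoDataFrame({"site": ["a"]}, geometry=[Point(0.5, 0.5)], crs="EPSG:4326")

joined = gpd.sjoin(
    points, tracts[["GEOID10_TRACT", "geometry"]], how="inner", predicate="intersects"
)
print(joined[["site", "GEOID10_TRACT"]])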
File diff suppressed because it is too large
|
@ -207,28 +207,34 @@ def test_max_40_percent_DAC(final_score_df):
|
|||
|
||||
|
||||
def test_donut_hole_addition_to_score_n(final_score_df):
|
||||
score_col_with_donuts = field_names.FINAL_SCORE_N_BOOLEAN
|
||||
score_col = field_names.SCORE_N_COMMUNITIES
|
||||
donut_hole_score_only = (
|
||||
dacs_col_with_donuts = field_names.FINAL_SCORE_N_BOOLEAN
|
||||
dacs_col = field_names.SCORE_N_COMMUNITIES
|
||||
donut_hole_community_col = (
|
||||
field_names.SCORE_N_COMMUNITIES + field_names.ADJACENT_MEAN_SUFFIX
|
||||
)
|
||||
count_donuts = final_score_df[donut_hole_score_only].sum()
|
||||
count_n = final_score_df[score_col].sum()
|
||||
count_n_with_donuts = final_score_df[score_col_with_donuts].sum()
|
||||
new_donuts = final_score_df[
|
||||
final_score_df[donut_hole_score_only] & ~final_score_df[score_col]
|
||||
# Number of donuts found regardless of other scoring.
|
||||
num_donuts = final_score_df[donut_hole_community_col].sum()
|
||||
|
||||
# Number of DACS not including adjacency.
|
||||
num_dacs = final_score_df[dacs_col].sum()
|
||||
|
||||
# Number of DACS including adjacency.
|
||||
num_dacs_with_donuts = final_score_df[dacs_col_with_donuts].sum()
|
||||
|
||||
# Number of DACS that are donuts.
|
||||
num_dacs_due_to_donuts = final_score_df[
|
||||
final_score_df[donut_hole_community_col] & ~final_score_df[dacs_col]
|
||||
].shape[0]
|
||||
|
||||
assert (
|
||||
new_donuts + count_n == count_n_with_donuts
|
||||
), "The math doesn't work! The number of new donut hole tracts plus score tracts (base) does not equal the total number of tracts identified"
|
||||
assert num_dacs_due_to_donuts <= num_dacs_with_donuts
|
||||
assert num_dacs_with_donuts >= num_dacs
|
||||
|
||||
assert (
|
||||
count_donuts < count_n
|
||||
num_donuts < num_dacs
|
||||
), "There are more donut hole tracts than base tracts. How can it be?"
|
||||
|
||||
assert (
|
||||
new_donuts > 0
|
||||
num_dacs_due_to_donuts > 0
|
||||
), "FYI: The adjacency index is doing nothing. Consider removing it?"
|
||||
|
||||
|
||||
|
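The renamed variables spell out the invariants being checked. A toy illustration, assuming for the sketch that the final designation is simply the union of the base designation and the donut-hole flag:

# Toy frame: donut-hole tracts can only add to the DAC count, never remove from it.
import pandas as pd

df = pd.DataFrame(
    {
        "dacs_base": [True, False, False, True],
        "donut_hole": [False, True, False, False],
    }
)
df["dacs_final"] = df["dacs_base"] | df["donut_hole"]

num_dacs = df["dacs_base"].sum()
num_dacs_with_donuts = df["dacs_final"].sum()
num_dacs_due_to_donuts = (df["donut_hole"] & ~df["dacs_base"]).sum()

assert num_dacs_with_donuts >= num_dacs
assert num_dacs_due_to_donuts <= num_dacs_with_donuts
assert num_dacs + num_dacs_due_to_donuts == num_dacs_with_donuts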
@ -429,30 +435,6 @@ def test_all_tracts_have_scores(final_score_df):
|
|||
|
||||
|
||||
def test_imputed_tracts(final_score_df):
|
||||
# Make sure that any tracts with zero population have null imputed income
|
||||
tracts_with_zero_population_df = final_score_df[
|
||||
final_score_df[field_names.TOTAL_POP_FIELD] == 0
|
||||
]
|
||||
assert (
|
||||
tracts_with_zero_population_df[
|
||||
field_names.POVERTY_LESS_THAN_200_FPL_IMPUTED_FIELD
|
||||
]
|
||||
.isna()
|
||||
.all()
|
||||
)
|
||||
|
||||
# Make sure that any tracts with null population have null imputed income
|
||||
tracts_with_null_population_df = final_score_df[
|
||||
final_score_df[field_names.TOTAL_POP_FIELD].isnull()
|
||||
]
|
||||
assert (
|
||||
tracts_with_null_population_df[
|
||||
field_names.POVERTY_LESS_THAN_200_FPL_IMPUTED_FIELD
|
||||
]
|
||||
.isna()
|
||||
.all()
|
||||
)
|
||||
|
||||
# Make sure that no tracts with population have null imputed income
|
||||
# We DO NOT impute income for island areas, so remove those from the test
|
||||
is_island_area = (
|
||||
|
|
|
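For reference, a toy illustration of the population/imputation invariant this test is organized around: tracts with zero or missing population carry a null imputed income value, while populated (non-island) tracts do not. Column names below are illustrative only:

# Toy check of the zero/null-population imputation invariants.
import numpy as np
import pandas as pd

df = pd.DataFrame(
    {
        "population": [0, np.nan, 1200],
        "imputed_income_share": [np.nan, np.nan, 0.35],
    }
)
no_pop = df[(df["population"] == 0) | (df["population"].isna())]
assert no_pop["imputed_income_share"].isna().all()
assert df.loc[df["population"] > 0, "imputed_income_share"].notna().all()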
@ -8,11 +8,6 @@ import pandas as pd
|
|||
import pytest
|
||||
from data_pipeline.config import settings
|
||||
from data_pipeline.etl.score import constants
|
||||
from data_pipeline.etl.score.constants import THRESHOLD_COUNT_TO_SHOW_FIELD_NAME
|
||||
from data_pipeline.etl.score.constants import TILES_SCORE_COLUMNS
|
||||
from data_pipeline.etl.score.constants import (
|
||||
USER_INTERFACE_EXPERIENCE_FIELD_NAME,
|
||||
)
|
||||
from data_pipeline.score import field_names
|
||||
|
||||
from .fixtures import final_score_df # pylint: disable=unused-import
|
||||
|
@ -22,10 +17,8 @@ pytestmark = pytest.mark.smoketest
|
|||
|
||||
@pytest.fixture
|
||||
def tiles_df(scope="session"):
|
||||
return pd.read_csv(
|
||||
settings.APP_ROOT / "data" / "score" / "csv" / "tiles" / "usa.csv",
|
||||
dtype={"GTF": str},
|
||||
low_memory=False,
|
||||
return pd.read_parquet(
|
||||
constants.DATA_SCORE_CSV_TILES_FILE_PATH,
|
||||
)
|
||||
|
||||
|
||||
|
@ -73,7 +66,6 @@ def test_percentiles(tiles_df):
|
|||
assert (tiles_df[col].median() >= 0.4) & (
|
||||
tiles_df[col].median() <= 0.6
|
||||
), f"Percentile distribution for {col} is decidedly not uniform"
|
||||
return True
|
||||
|
||||
|
||||
def test_count_of_fips_codes(tiles_df, final_score_df):
|
||||
|
@ -91,19 +83,19 @@ def test_count_of_fips_codes(tiles_df, final_score_df):
|
|||
|
||||
|
||||
def test_column_presence(tiles_df):
|
||||
expected_column_names = set(TILES_SCORE_COLUMNS.values()) | {
|
||||
THRESHOLD_COUNT_TO_SHOW_FIELD_NAME,
|
||||
USER_INTERFACE_EXPERIENCE_FIELD_NAME,
|
||||
expected_column_names = set(constants.TILES_SCORE_COLUMNS.values()) | {
|
||||
constants.THRESHOLD_COUNT_TO_SHOW_FIELD_NAME,
|
||||
constants.USER_INTERFACE_EXPERIENCE_FIELD_NAME,
|
||||
}
|
||||
actual_column_names = set(tiles_df.columns)
|
||||
extra_columns = actual_column_names - expected_column_names
|
||||
missing_columns = expected_column_names - actual_column_names
|
||||
assert not (
|
||||
extra_columns
|
||||
), f"tiles/usa.csv has columns not specified in TILE_SCORE_COLUMNS: {extra_columns}"
|
||||
), f"tiles score has columns not specified in TILE_SCORE_COLUMNS: {extra_columns}"
|
||||
assert not (
|
||||
missing_columns
|
||||
), f"tiles/usa.csv is missing columns from TILE_SCORE_COLUMNS: {missing_columns}"
|
||||
), f"tiles score is missing columns from TILE_SCORE_COLUMNS: {missing_columns}"
|
||||
|
||||
|
||||
def test_tract_equality(tiles_df, final_score_df):
|
||||
|
@ -189,12 +181,17 @@ def test_for_column_fidelitiy_from_score(tiles_df, final_score_df):
|
|||
# every tile column
|
||||
# * Because tiles use rounded floats, we use close with a tolerance
|
||||
assert (
|
||||
set(TILES_SCORE_COLUMNS.values()) - set(tiles_df.columns) == set()
|
||||
set(constants.TILES_SCORE_COLUMNS.values()) - set(tiles_df.columns)
|
||||
== set()
|
||||
), "Some TILES_SCORE_COLUMNS are missing from the tiles dataframe"
|
||||
|
||||
# Keep only the tiles score columns in the final score data
|
||||
final_score_df = final_score_df.rename(columns=TILES_SCORE_COLUMNS).drop(
|
||||
final_score_df.columns.difference(TILES_SCORE_COLUMNS.values()),
|
||||
final_score_df = final_score_df.rename(
|
||||
columns=constants.TILES_SCORE_COLUMNS
|
||||
).drop(
|
||||
final_score_df.columns.difference(
|
||||
constants.TILES_SCORE_COLUMNS.values()
|
||||
),
|
||||
axis=1,
|
||||
errors="ignore",
|
||||
)
|
||||
|
@ -227,7 +224,7 @@ def test_for_column_fidelitiy_from_score(tiles_df, final_score_df):
|
|||
assert not errors, error_message
|
||||
|
||||
|
||||
def test_for_geojson_fidelity_from_tiles_csv(tiles_df, tiles_geojson_df):
|
||||
def test_for_geojson_fidelity_from_tiles_score(tiles_df, tiles_geojson_df):
|
||||
tiles_geojson_df = tiles_geojson_df.drop(columns=["geometry"]).rename(
|
||||
columns={"GEOID10": "GTF"}
|
||||
)
|
||||
|
@ -252,11 +249,11 @@ def test_for_geojson_fidelity_from_tiles_csv(tiles_df, tiles_geojson_df):
|
|||
tiles_geojson_df[col_name] = tiles_df[col_name].replace({None: np.nan})
|
||||
error_message = f"Column {col_name} not equal "
|
||||
# For non-numeric types, we can use the built-in equals from pandas
|
||||
if tiles_df[col_name].dtype in [
|
||||
np.dtype(object),
|
||||
np.dtype(bool),
|
||||
np.dtype(str),
|
||||
]:
|
||||
if (
|
||||
pd.api.types.is_bool_dtype(tiles_df[col_name])
|
||||
or pd.api.types.is_object_dtype(tiles_df[col_name])
|
||||
or pd.api.types.is_string_dtype(tiles_df[col_name])
|
||||
):
|
||||
assert tiles_df[col_name].equals(
|
||||
tiles_geojson_df[col_name]
|
||||
), error_message
|
||||
|
|
|
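The comparison now branches on pandas' dtype predicates rather than a hard-coded list of numpy dtypes. A small sketch of the same dtype-aware pattern, with the numeric branch tolerating the rounding applied when tiles are written:

# Sketch: exact equality for bool/object/string columns, tolerant comparison for numbers.
import numpy as np
import pandas as pd

def columns_match(left: pd.DataFrame, right: pd.DataFrame, col: str) -> bool:
    if (
        pd.api.types.is_bool_dtype(left[col])
        or pd.api.types.is_object_dtype(left[col])
        or pd.api.types.is_string_dtype(left[col])
    ):
        return left[col].equals(right[col])
    # Numeric columns: allow for the rounding applied to tile values.
    return np.allclose(left[col], right[col], atol=1e-2, equal_nan=True)

a = pd.DataFrame({"GTF": ["01001020100"], "score": [0.1234]})
b = pd.DataFrame({"GTF": ["01001020100"], "score": [0.12]})
assert columns_match(a, b, "GTF") and columns_match(a, b, "score")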
@ -5,6 +5,7 @@ import shutil
|
|||
import sys
|
||||
import uuid
|
||||
import zipfile
|
||||
import pandas as pd
|
||||
from pathlib import Path
|
||||
from typing import List
|
||||
from typing import Union
|
||||
|
@ -348,6 +349,57 @@ def zip_directory(
|
|||
zipf.close()
|
||||
|
||||
|
||||
def average_tract_translate(
|
||||
df: pd.DataFrame,
|
||||
xwalk: pd.DataFrame,
|
||||
tract_year_in: str = "GEOID10_TRACT",
|
||||
tract_year_out: str = "GEOID20_TRACT",
|
||||
) -> pd.DataFrame:
|
||||
"""
|
||||
Minimally tested prototype of an averaging function
|
||||
|
||||
Can be used to translate between 2010 and 2020 tract boundaries.
|
||||
For example, if we update to ACS data with 2020 boundaries, we will need to
|
||||
translate all data sources that are still using 2010 boundaries. To do this,
|
||||
average_tract_translate() will take each 2020 tract ID and find all the 2010
|
||||
tracts that are mapped to it, and then take the mean of each column across
|
||||
these mapped 2010 tracts.
|
||||
|
||||
Note that this function only works on numeric columns.
|
||||
|
||||
The current set-up requires the crosswalk to be passed in as an argument;
|
||||
it may be easier to upload a static copy of the crosswalk
|
||||
and read it in at the beginning of the function.
|
||||
|
||||
Crosswalk:
|
||||
https://www2.census.gov/geo/docs/maps-data/data/rel2020/tract/tab20_tract20_tract10_natl.txt
|
||||
|
||||
Explanation of crosswalk:
|
||||
https://www2.census.gov/geo/pdfs/maps-data/data/rel2020/tract/explanation_tab20_tract20_tract10.pdf
|
||||
|
||||
NB: Crosswalks for territories are stored in separate files.
|
||||
"""
|
||||
|
||||
# pre-process xwalk
|
||||
# could be uploaded as a static copy and read in here
|
||||
xwalk = xwalk.rename(
|
||||
columns={
|
||||
"GEOID_TRACT_10": "GEOID10_TRACT",
|
||||
"GEOID_TRACT_20": "GEOID20_TRACT",
|
||||
}
|
||||
)
|
||||
xwalk = xwalk[["GEOID10_TRACT", "GEOID20_TRACT"]]
|
||||
|
||||
# merge xwalk into input data
|
||||
merged_df = df.merge(xwalk, how="left", on=tract_year_in)
|
||||
|
||||
# group by average
|
||||
averaged_df = merged_df.groupby(tract_year_out).mean()
|
||||
|
||||
# reindex (bc input df doesn't have tract ID as index but rather as column)
|
||||
return averaged_df.reset_index()
|
||||
|
||||
|
||||
def load_yaml_dict_from_file(
|
||||
yaml_file_path: Path,
|
||||
schema_class: Union[CSVConfig, ExcelConfig, CodebookConfig],
|
||||
|
|
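A self-contained sketch of the crosswalk-averaging idea behind average_tract_translate(), with made-up tract IDs; the 2010 ID column is dropped before the group-by so that only numeric columns are averaged, per the docstring's numeric-only caveat:

# Two 2010 tracts map to one 2020 tract; its value is the mean of its parents.
import pandas as pd

xwalk = pd.DataFrame(
    {
        "GEOID_TRACT_10": ["01001020100", "01001020200"],
        "GEOID_TRACT_20": ["01001020101", "01001020101"],
    }
).rename(columns={"GEOID_TRACT_10": "GEOID10_TRACT", "GEOID_TRACT_20": "GEOID20_TRACT"})

df_2010 = pd.DataFrame({"GEOID10_TRACT": ["01001020100", "01001020200"], "pm25": [8.1, 9.3]})

merged = df_2010.merge(xwalk, how="left", on="GEOID10_TRACT")
df_2020 = (
    merged.drop(columns=["GEOID10_TRACT"])  # keep only numeric columns for the mean
    .groupby("GEOID20_TRACT")
    .mean()
    .reset_index()
)
print(df_2020)  # one 2020 tract with pm25 = (8.1 + 9.3) / 2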
248
data/data-pipeline/poetry.lock
generated
|
@ -979,47 +979,50 @@ typing = ["typing-extensions (>=4.12.2)"]
|
|||
|
||||
[[package]]
|
||||
name = "fiona"
|
||||
version = "1.8.22"
|
||||
version = "1.10.1"
|
||||
description = "Fiona reads and writes spatial data files"
|
||||
optional = false
|
||||
python-versions = "*"
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "Fiona-1.8.22-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:59a3800bc09ebee3516d64d02a8a6818d07ab1573c6096f3ef3468bf9f8f95f8"},
|
||||
{file = "Fiona-1.8.22-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:904793b17aee70ca9c3d582dbf01623eccfdeacd00c5e1a8e421be41f2e43d67"},
|
||||
{file = "Fiona-1.8.22-cp310-cp310-win_amd64.whl", hash = "sha256:df34c980cd7396adfbc89bbb363bdd6e358c76f91969fc98c9dfc076dd11638d"},
|
||||
{file = "Fiona-1.8.22-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:75924f69c51db6e258c91308780546278028c509db12aa33a47692a0266c9667"},
|
||||
{file = "Fiona-1.8.22-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e33860aaf70bbd2726cff12fd3857bd832b6dc2ad3ce4b27e7563bd68abdc26f"},
|
||||
{file = "Fiona-1.8.22-cp311-cp311-win_amd64.whl", hash = "sha256:18649326a7724611b16b648e14fd094089d517413b95ac91d0cdb0adc5fcb8de"},
|
||||
{file = "Fiona-1.8.22-cp36-cp36m-macosx_10_10_x86_64.whl", hash = "sha256:c4aafdd565b3a30bdd78cafae35d4945f6741eef31401c1bb1e166b6262d7539"},
|
||||
{file = "Fiona-1.8.22-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3f26c8b6ea9bc92cbd52a4dd83ffd44472450bf92f4e3d4ef2341adc2f35a54d"},
|
||||
{file = "Fiona-1.8.22-cp36-cp36m-win_amd64.whl", hash = "sha256:c28d9ffa5d230a1d9eaf571529fa9eb7573d39613354c090ad077ad153a37ee1"},
|
||||
{file = "Fiona-1.8.22-cp37-cp37m-macosx_10_10_x86_64.whl", hash = "sha256:d47777890aa1d715025abc7a6d6b2a6bb8d2a37cc94c44ce95940b80eda21444"},
|
||||
{file = "Fiona-1.8.22-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:e3ed1c0c1c60f710a612aaeb294de54214d228c4ef40e0c1dc159e46f86a9446"},
|
||||
{file = "Fiona-1.8.22-cp37-cp37m-win_amd64.whl", hash = "sha256:ce9a22c9883cc5d11c05ba3fb9db5082044a07c6b299753ea5bb8e178b8ba53b"},
|
||||
{file = "Fiona-1.8.22-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:6ba2294bc6adcbc36229862667aac6b98e6c306e1958caf53b8bfcf9a3b8c77a"},
|
||||
{file = "Fiona-1.8.22-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b5cad3424b7473eb0e19f17ee45abec92133a694a4b452a278f02e3b8d0f810f"},
|
||||
{file = "Fiona-1.8.22-cp38-cp38-win_amd64.whl", hash = "sha256:b88e2e6548a41c1dfa3f96c8275ff472a3edca729e14a641c0fa5b2e146a8ab5"},
|
||||
{file = "Fiona-1.8.22-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:ed75dd29c89e0e455e3a322f28cd92f192bcb8fced16e2bfb6422a7f95ffe5e9"},
|
||||
{file = "Fiona-1.8.22-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89cfcc3bdb4aba7bba1eb552b3866b851334693ab694529803122b21f5927960"},
|
||||
{file = "Fiona-1.8.22-cp39-cp39-win_amd64.whl", hash = "sha256:d0df3e105ad7f0cca5f16b441c232fd693ef6c4adf2c1b6271aaaa1cdc06164d"},
|
||||
{file = "Fiona-1.8.22.tar.gz", hash = "sha256:a82a99ce9b3e7825740157c45c9fb2259d4e92f0a886aaac25f0db40ffe1eea3"},
|
||||
{file = "fiona-1.10.1-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:6e2a94beebda24e5db8c3573fe36110d474d4a12fac0264a3e083c75e9d63829"},
|
||||
{file = "fiona-1.10.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fc7366f99bdc18ec99441b9e50246fdf5e72923dc9cbb00267b2bf28edd142ba"},
|
||||
{file = "fiona-1.10.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c32f424b0641c79f4036b96c2e80322fb181b4e415c8cd02d182baef55e6730"},
|
||||
{file = "fiona-1.10.1-cp310-cp310-win_amd64.whl", hash = "sha256:9a67bd88918e87d64168bc9c00d9816d8bb07353594b5ce6c57252979d5dc86e"},
|
||||
{file = "fiona-1.10.1-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:98fe556058b370da07a84f6537c286f87eb4af2343d155fbd3fba5d38ac17ed7"},
|
||||
{file = "fiona-1.10.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:be29044d4aeebae92944b738160dc5f9afc4cdf04f551d59e803c5b910e17520"},
|
||||
{file = "fiona-1.10.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94bd3d448f09f85439e4b77c38b9de1aebe3eef24acc72bd631f75171cdfde51"},
|
||||
{file = "fiona-1.10.1-cp311-cp311-win_amd64.whl", hash = "sha256:30594c0cd8682c43fd01e7cdbe000f94540f8fa3b7cb5901e805c88c4ff2058b"},
|
||||
{file = "fiona-1.10.1-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:7338b8c68beb7934bde4ec9f49eb5044e5e484b92d940bc3ec27defdb2b06c67"},
|
||||
{file = "fiona-1.10.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8c77fcfd3cdb0d3c97237965f8c60d1696a64923deeeb2d0b9810286cbe25911"},
|
||||
{file = "fiona-1.10.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:537872cbc9bda7fcdf73851c91bc5338fca2b502c4c17049ccecaa13cde1f18f"},
|
||||
{file = "fiona-1.10.1-cp312-cp312-win_amd64.whl", hash = "sha256:41cde2c52c614457e9094ea44b0d30483540789e62fe0fa758c2a2963e980817"},
|
||||
{file = "fiona-1.10.1-cp313-cp313-macosx_10_15_x86_64.whl", hash = "sha256:a00b05935c9900678b2ca660026b39efc4e4b916983915d595964eb381763ae7"},
|
||||
{file = "fiona-1.10.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f78b781d5bcbbeeddf1d52712f33458775dbb9fd1b2a39882c83618348dd730f"},
|
||||
{file = "fiona-1.10.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:29ceeb38e3cd30d91d68858d0817a1bb0c4f96340d334db4b16a99edb0902d35"},
|
||||
{file = "fiona-1.10.1-cp313-cp313-win_amd64.whl", hash = "sha256:15751c90e29cee1e01fcfedf42ab85987e32f0b593cf98d88ed52199ef5ca623"},
|
||||
{file = "fiona-1.10.1-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:6f1242f872dc33d3b4269dcaebf1838a359f9097e1cc848b0e11367bce010e4d"},
|
||||
{file = "fiona-1.10.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:65308b7a7e57fcc533de8a5855b0fce798faabc736d1340192dd8673ff61bc4e"},
|
||||
{file = "fiona-1.10.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:632bc146355af5ff0d77e34ebd1be5072d623b4aedb754b94a3d8c356c4545ac"},
|
||||
{file = "fiona-1.10.1-cp38-cp38-win_amd64.whl", hash = "sha256:b7b4c3c97b1d64a1b3321577e9edaebbd36b64006e278f225f300c497cc87c35"},
|
||||
{file = "fiona-1.10.1-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:b62aa8d5a0981bd33d81c247219b1eaa1e655e0a0682b3a4759fccc40954bb30"},
|
||||
{file = "fiona-1.10.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f4b19cb5bd22443ef439b39239272349023556994242a8f953a0147684e1c47f"},
|
||||
{file = "fiona-1.10.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa7e7e5ad252ef29905384bf92e7d14dd5374584b525632652c2ab8925304670"},
|
||||
{file = "fiona-1.10.1-cp39-cp39-win_amd64.whl", hash = "sha256:4e82d18acbe55230e9cf8ede2a836d99ea96b7c0cc7d2b8b993e6c9f0ac14dc2"},
|
||||
{file = "fiona-1.10.1.tar.gz", hash = "sha256:b00ae357669460c6491caba29c2022ff0acfcbde86a95361ea8ff5cd14a86b68"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
attrs = ">=17"
|
||||
attrs = ">=19.2.0"
|
||||
certifi = "*"
|
||||
click = ">=4.0"
|
||||
click = ">=8.0,<9.0"
|
||||
click-plugins = ">=1.0"
|
||||
cligj = ">=0.5"
|
||||
munch = "*"
|
||||
setuptools = "*"
|
||||
six = ">=1.7"
|
||||
|
||||
[package.extras]
|
||||
all = ["boto3 (>=1.2.4)", "mock", "pytest (>=3)", "pytest-cov", "shapely"]
|
||||
calc = ["shapely"]
|
||||
s3 = ["boto3 (>=1.2.4)"]
|
||||
test = ["boto3 (>=1.2.4)", "mock", "pytest (>=3)", "pytest-cov"]
|
||||
all = ["fiona[calc,s3,test]"]
|
||||
calc = ["pyparsing", "shapely"]
|
||||
s3 = ["boto3 (>=1.3.1)"]
|
||||
test = ["aiohttp", "fiona[s3]", "fsspec", "pytest (>=7)", "pytest-cov", "pytz"]
|
||||
|
||||
[[package]]
|
||||
name = "flake8"
|
||||
|
@ -1224,21 +1227,26 @@ files = [
|
|||
|
||||
[[package]]
|
||||
name = "geopandas"
|
||||
version = "0.11.1"
|
||||
version = "1.0.1"
|
||||
description = "Geographic pandas extensions"
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
python-versions = ">=3.9"
|
||||
files = [
|
||||
{file = "geopandas-0.11.1-py3-none-any.whl", hash = "sha256:f3344937f3866e52996c7e505d56dae78be117dc840cd1c23507da0b33c0af71"},
|
||||
{file = "geopandas-0.11.1.tar.gz", hash = "sha256:f0f0c8d0423d30cf81de2056d853145c4362739350a7f8f2d72cc7409ef1eca1"},
|
||||
{file = "geopandas-1.0.1-py3-none-any.whl", hash = "sha256:01e147d9420cc374d26f51fc23716ac307f32b49406e4bd8462c07e82ed1d3d6"},
|
||||
{file = "geopandas-1.0.1.tar.gz", hash = "sha256:b8bf70a5534588205b7a56646e2082fb1de9a03599651b3d80c99ea4c2ca08ab"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
fiona = ">=1.8"
|
||||
numpy = ">=1.22"
|
||||
packaging = "*"
|
||||
pandas = ">=1.0.0"
|
||||
pyproj = ">=2.6.1.post1"
|
||||
shapely = ">=1.7,<2"
|
||||
pandas = ">=1.4.0"
|
||||
pyogrio = ">=0.7.2"
|
||||
pyproj = ">=3.3.0"
|
||||
shapely = ">=2.0.0"
|
||||
|
||||
[package.extras]
|
||||
all = ["GeoAlchemy2", "SQLAlchemy (>=1.3)", "folium", "geopy", "mapclassify", "matplotlib (>=3.5.0)", "psycopg-binary (>=3.1.0)", "pyarrow (>=8.0.0)", "xyzservices"]
|
||||
dev = ["black", "codecov", "pre-commit", "pytest (>=3.1.0)", "pytest-cov", "pytest-xdist"]
|
||||
|
||||
[[package]]
|
||||
name = "idna"
|
||||
|
@ -2550,21 +2558,6 @@ files = [
|
|||
{file = "multidict-6.1.0.tar.gz", hash = "sha256:22ae2ebf9b0c69d206c003e2f6a914ea33f0a932d4aa16f236afc049d9958f4a"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "munch"
|
||||
version = "4.0.0"
|
||||
description = "A dot-accessible dictionary (a la JavaScript objects)"
|
||||
optional = false
|
||||
python-versions = ">=3.6"
|
||||
files = [
|
||||
{file = "munch-4.0.0-py2.py3-none-any.whl", hash = "sha256:71033c45db9fb677a0b7eb517a4ce70ae09258490e419b0e7f00d1e386ecb1b4"},
|
||||
{file = "munch-4.0.0.tar.gz", hash = "sha256:542cb151461263216a4e37c3fd9afc425feeaf38aaa3025cd2a981fadb422235"},
|
||||
]
|
||||
|
||||
[package.extras]
|
||||
testing = ["astroid (>=2.0)", "coverage", "pylint (>=2.3.1,<2.4.0)", "pytest"]
|
||||
yaml = ["PyYAML (>=5.1.0)"]
|
||||
|
||||
[[package]]
|
||||
name = "mypy"
|
||||
version = "0.910"
|
||||
|
@ -3491,6 +3484,57 @@ tomlkit = ">=0.10.1"
|
|||
spelling = ["pyenchant (>=3.2,<4.0)"]
|
||||
testutils = ["gitpython (>3)"]
|
||||
|
||||
[[package]]
|
||||
name = "pyogrio"
|
||||
version = "0.10.0"
|
||||
description = "Vectorized spatial vector file format I/O using GDAL/OGR"
|
||||
optional = false
|
||||
python-versions = ">=3.9"
|
||||
files = [
|
||||
{file = "pyogrio-0.10.0-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:046eeeae12a03a3ebc3dc5ff5a87664e4f5fc0a4fb1ea5d5c45d547fa941072b"},
|
||||
{file = "pyogrio-0.10.0-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:44380f4d9245c776f432526e29ce4d29238aea26adad991803c4f453474f51d3"},
|
||||
{file = "pyogrio-0.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14fd3b72b4e2dc59e264607b265c742b0c5ec2ea9e748b115f742381b28dd373"},
|
||||
{file = "pyogrio-0.10.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:1fea7892f4633cab04d13563e47ec2e87dc2b5cd71b9546018d123184528c151"},
|
||||
{file = "pyogrio-0.10.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:3539596a76eb8a9d166d6f9d3f36731a8c5bd5c43901209d89dc66b9dc00f079"},
|
||||
{file = "pyogrio-0.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:eac90b2501656892c63bc500c12e71f3dbf7d66ddc5a7fb05cd480d25d1b7022"},
|
||||
{file = "pyogrio-0.10.0-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:5b1a51431a27a1cb3e4e19558939c1423106e06e7b67d6285f4fba9c2d0a91b9"},
|
||||
{file = "pyogrio-0.10.0-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:216d69cd77b2b4a0c9d7d449bc239f8b77f3d73f4a05d9c738a0745b236902d8"},
|
||||
{file = "pyogrio-0.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a2f0b75f0077ce33256aec6278c2a9c3b79bf0637ddf4f93d3ab2609f0501d96"},
|
||||
{file = "pyogrio-0.10.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:0a47f702d29808c557d2ebea8542c23903f021eae44e16838adef2ab4281c71b"},
|
||||
{file = "pyogrio-0.10.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:11e6c71d12da6b445e77d0fc0198db1bd35a77e03a0685e45338cbab9ce02add"},
|
||||
{file = "pyogrio-0.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:d0d74e91a9c0ff2f9abe01b556ff663977193b2d6922208406172d0fc833beff"},
|
||||
{file = "pyogrio-0.10.0-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:2d6558b180e020f71ab7aa7f82d592ed3305c9f698d98f6d0a4637ec7a84c4ce"},
|
||||
{file = "pyogrio-0.10.0-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:a99102037eead8ba491bc57825c1e395ee31c9956d7bff7b4a9e4fdbff3a13c2"},
|
||||
{file = "pyogrio-0.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a4c373281d7cbf560c5b61f8f3c7442103ad7f1c7ac4ef3a84572ed7a5dd2f6"},
|
||||
{file = "pyogrio-0.10.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:19f18411bdf836d24cdc08b9337eb3ec415e4ac4086ba64516b36b73a2e88622"},
|
||||
{file = "pyogrio-0.10.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:1abbcdd9876f30bebf1df8a0273f6cdeb29d03259290008275c7fddebe139f20"},
|
||||
{file = "pyogrio-0.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:2a3e09839590d71ff832aa95c4f23fa00a2c63c3de82c1fbd4fb8d265792acfc"},
|
||||
{file = "pyogrio-0.10.0-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:c90478209537a31dcc65664a87a04c094bb0e08efe502908a6682b8cec0259bf"},
|
||||
{file = "pyogrio-0.10.0-cp313-cp313-macosx_12_0_x86_64.whl", hash = "sha256:fec45e1963b7058e5a1aa98598aed07c0858512c833d6aad2c672c3ec98bbf04"},
|
||||
{file = "pyogrio-0.10.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28cb139f8a5d0365ede602230104b407ae52bb6b55173c8d5a35424d28c4a2c5"},
|
||||
{file = "pyogrio-0.10.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:cea0187fcc2d574e52af8cfab041fa0a7ad71d5ef6b94b49a3f3d2a04534a27e"},
|
||||
{file = "pyogrio-0.10.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:7c02b207ea8cf09c501ea3e95d29152781a00d3c32267286bc36fa457c332205"},
|
||||
{file = "pyogrio-0.10.0-cp313-cp313-win_amd64.whl", hash = "sha256:02e54bcfb305af75f829044b0045f74de31b77c2d6546f7aaf96822066147848"},
|
||||
{file = "pyogrio-0.10.0-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:ea96a1338ed7991735b955d3f84ad5f71b3bc070b6a7a42449941aedecc71768"},
|
||||
{file = "pyogrio-0.10.0-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:32d349600561459791a43f528a92f3e9343a59bdc9bc30b1be9376f0b80cbf16"},
|
||||
{file = "pyogrio-0.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82f7bd6a87bd2e9484bcb4c87ab94eee4c2f573ad148707431c8b341d7f13d99"},
|
||||
{file = "pyogrio-0.10.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:6166ae81462c257ed8e151c404e316642703813cf771c95ef8e11dcdf2581e47"},
|
||||
{file = "pyogrio-0.10.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:22d57495e835fe51b88da43dfbda606c07e1f6c3b849af0c3cfc18e17467641c"},
|
||||
{file = "pyogrio-0.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:eea82171bfc07fc778b8dc87b0cdc9ac06c389bc56b0c0b6f34bf9e45fb78c0e"},
|
||||
{file = "pyogrio-0.10.0.tar.gz", hash = "sha256:ec051cb568324de878828fae96379b71858933413e185148acb6c162851ab23c"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
certifi = "*"
|
||||
numpy = "*"
|
||||
packaging = "*"
|
||||
|
||||
[package.extras]
|
||||
benchmark = ["pytest-benchmark"]
|
||||
dev = ["cython"]
|
||||
geopandas = ["geopandas"]
|
||||
test = ["pytest", "pytest-cov"]
|
||||
|
||||
[[package]]
|
||||
name = "pypandoc"
|
||||
version = "1.14"
|
||||
|
@ -4280,59 +4324,61 @@ type = ["importlib_metadata (>=7.0.2)", "jaraco.develop (>=7.21)", "mypy (==1.14
|
|||
|
||||
[[package]]
|
||||
name = "shapely"
|
||||
version = "1.8.5.post1"
|
||||
description = "Geometric objects, predicates, and operations"
|
||||
version = "2.0.6"
|
||||
description = "Manipulation and analysis of geometric objects"
|
||||
optional = false
|
||||
python-versions = ">=3.6"
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "Shapely-1.8.5.post1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d048f93e42ba578b82758c15d8ae037d08e69d91d9872bca5a1895b118f4e2b0"},
|
||||
{file = "Shapely-1.8.5.post1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:99ab0ddc05e44acabdbe657c599fdb9b2d82e86c5493bdae216c0c4018a82dee"},
|
||||
{file = "Shapely-1.8.5.post1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:99a2f0da0109e81e0c101a2b4cd8412f73f5f299e7b5b2deaf64cd2a100ac118"},
|
||||
{file = "Shapely-1.8.5.post1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:6fe855e7d45685926b6ba00aaeb5eba5862611f7465775dacd527e081a8ced6d"},
|
||||
{file = "Shapely-1.8.5.post1-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ec14ceca36f67cb48b34d02d7f65a9acae15cd72b48e303531893ba4a960f3ea"},
|
||||
{file = "Shapely-1.8.5.post1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8a2b2a65fa7f97115c1cd989fe9d6f39281ca2a8a014f1d4904c1a6e34d7f25"},
|
||||
{file = "Shapely-1.8.5.post1-cp310-cp310-win32.whl", hash = "sha256:21776184516a16bf82a0c3d6d6a312b3cd15a4cabafc61ee01cf2714a82e8396"},
|
||||
{file = "Shapely-1.8.5.post1-cp310-cp310-win_amd64.whl", hash = "sha256:a354199219c8d836f280b88f2c5102c81bb044ccea45bd361dc38a79f3873714"},
|
||||
{file = "Shapely-1.8.5.post1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:783bad5f48e2708a0e2f695a34ed382e4162c795cb2f0368b39528ac1d6db7ed"},
|
||||
{file = "Shapely-1.8.5.post1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a23ef3882d6aa203dd3623a3d55d698f59bfbd9f8a3bfed52c2da05a7f0f8640"},
|
||||
{file = "Shapely-1.8.5.post1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ab38f7b5196ace05725e407cb8cab9ff66edb8e6f7bb36a398e8f73f52a7aaa2"},
|
||||
{file = "Shapely-1.8.5.post1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8d086591f744be483b34628b391d741e46f2645fe37594319e0a673cc2c26bcf"},
|
||||
{file = "Shapely-1.8.5.post1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4728666fff8cccc65a07448cae72c75a8773fea061c3f4f139c44adc429b18c3"},
|
||||
{file = "Shapely-1.8.5.post1-cp311-cp311-win32.whl", hash = "sha256:84010db15eb364a52b74ea8804ef92a6a930dfc1981d17a369444b6ddec66efd"},
|
||||
{file = "Shapely-1.8.5.post1-cp311-cp311-win_amd64.whl", hash = "sha256:48dcfffb9e225c0481120f4bdf622131c8c95f342b00b158cdbe220edbbe20b6"},
|
||||
{file = "Shapely-1.8.5.post1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:2fd15397638df291c427a53d641d3e6fd60458128029c8c4f487190473a69a91"},
|
||||
{file = "Shapely-1.8.5.post1-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:a74631e511153366c6dbe3229fa93f877e3c87ea8369cd00f1d38c76b0ed9ace"},
|
||||
{file = "Shapely-1.8.5.post1-cp36-cp36m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:66bdac74fbd1d3458fa787191a90fa0ae610f09e2a5ec398c36f968cc0ed743f"},
|
||||
{file = "Shapely-1.8.5.post1-cp36-cp36m-win32.whl", hash = "sha256:6d388c0c1bd878ed1af4583695690aa52234b02ed35f93a1c8486ff52a555838"},
|
||||
{file = "Shapely-1.8.5.post1-cp36-cp36m-win_amd64.whl", hash = "sha256:be9423d5a3577ac2e92c7e758bd8a2b205f5e51a012177a590bc46fc51eb4834"},
|
||||
{file = "Shapely-1.8.5.post1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:5d7f85c2d35d39ff53c9216bc76b7641c52326f7e09aaad1789a3611a0f812f2"},
|
||||
{file = "Shapely-1.8.5.post1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:adcf8a11b98af9375e32bff91de184f33a68dc48b9cb9becad4f132fa25cfa3c"},
|
||||
{file = "Shapely-1.8.5.post1-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:753ed0e21ab108bd4282405b9b659f2e985e8502b1a72b978eaa51d3496dee19"},
|
||||
{file = "Shapely-1.8.5.post1-cp37-cp37m-win32.whl", hash = "sha256:65b21243d8f6bcd421210daf1fabb9de84de2c04353c5b026173b88d17c1a581"},
|
||||
{file = "Shapely-1.8.5.post1-cp37-cp37m-win_amd64.whl", hash = "sha256:370b574c78dc5af3a198a6da5d9b3d7c04654bd2ef7e80e80a3a0992dfb2d9cd"},
|
||||
{file = "Shapely-1.8.5.post1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:532a55ee2a6c52d23d6f7d1567c8f0473635f3b270262c44e1b0c88096827e22"},
|
||||
{file = "Shapely-1.8.5.post1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3480657460e939f45a7d359ef0e172a081f249312557fe9aa78c4fd3a362d993"},
|
||||
{file = "Shapely-1.8.5.post1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b65f5d530ba91e49ffc7c589255e878d2506a8b96ffce69d3b7c4500a9a9eaf8"},
|
||||
{file = "Shapely-1.8.5.post1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:147066da0be41b147a61f8eb805dea3b13709dbc873a431ccd7306e24d712bc0"},
|
||||
{file = "Shapely-1.8.5.post1-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c2822111ddc5bcfb116e6c663e403579d0fe3f147d2a97426011a191c43a7458"},
|
||||
{file = "Shapely-1.8.5.post1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b47bb6f9369e8bf3e6dbd33e6a25a47ee02b2874792a529fe04a49bf8bc0df6"},
|
||||
{file = "Shapely-1.8.5.post1-cp38-cp38-win32.whl", hash = "sha256:2e0a8c2e55f1be1312b51c92b06462ea89e6bb703fab4b114e7a846d941cfc40"},
|
||||
{file = "Shapely-1.8.5.post1-cp38-cp38-win_amd64.whl", hash = "sha256:0d885cb0cf670c1c834df3f371de8726efdf711f18e2a75da5cfa82843a7ab65"},
|
||||
{file = "Shapely-1.8.5.post1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:0b4ee3132ee90f07d63db3aea316c4c065ed7a26231458dda0874414a09d6ba3"},
|
||||
{file = "Shapely-1.8.5.post1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:02dd5d7dc6e46515d88874134dc8fcdc65826bca93c3eecee59d1910c42c1b17"},
|
||||
{file = "Shapely-1.8.5.post1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c6a9a4a31cd6e86d0fbe8473ceed83d4fe760b19d949fb557ef668defafea0f6"},
|
||||
{file = "Shapely-1.8.5.post1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:38f0fbbcb8ca20c16451c966c1f527cc43968e121c8a048af19ed3e339a921cd"},
|
||||
{file = "Shapely-1.8.5.post1-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:78fb9d929b8ee15cfd424b6c10879ce1907f24e05fb83310fc47d2cd27088e40"},
|
||||
{file = "Shapely-1.8.5.post1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:89164e7a9776a19e29f01369a98529321994e2e4d852b92b7e01d4d9804c55bf"},
|
||||
{file = "Shapely-1.8.5.post1-cp39-cp39-win32.whl", hash = "sha256:8e59817b0fe63d34baedaabba8c393c0090f061917d18fc0bcc2f621937a8f73"},
|
||||
{file = "Shapely-1.8.5.post1-cp39-cp39-win_amd64.whl", hash = "sha256:e9c30b311de2513555ab02464ebb76115d242842b29c412f5a9aa0cac57be9f6"},
|
||||
{file = "Shapely-1.8.5.post1.tar.gz", hash = "sha256:ef3be705c3eac282a28058e6c6e5503419b250f482320df2172abcbea642c831"},
|
||||
{file = "shapely-2.0.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:29a34e068da2d321e926b5073539fd2a1d4429a2c656bd63f0bd4c8f5b236d0b"},
|
||||
{file = "shapely-2.0.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e1c84c3f53144febf6af909d6b581bc05e8785d57e27f35ebaa5c1ab9baba13b"},
|
||||
{file = "shapely-2.0.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2ad2fae12dca8d2b727fa12b007e46fbc522148a584f5d6546c539f3464dccde"},
|
||||
{file = "shapely-2.0.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b3304883bd82d44be1b27a9d17f1167fda8c7f5a02a897958d86c59ec69b705e"},
|
||||
{file = "shapely-2.0.6-cp310-cp310-win32.whl", hash = "sha256:3ec3a0eab496b5e04633a39fa3d5eb5454628228201fb24903d38174ee34565e"},
|
||||
{file = "shapely-2.0.6-cp310-cp310-win_amd64.whl", hash = "sha256:28f87cdf5308a514763a5c38de295544cb27429cfa655d50ed8431a4796090c4"},
|
||||
{file = "shapely-2.0.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5aeb0f51a9db176da9a30cb2f4329b6fbd1e26d359012bb0ac3d3c7781667a9e"},
|
||||
{file = "shapely-2.0.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9a7a78b0d51257a367ee115f4d41ca4d46edbd0dd280f697a8092dd3989867b2"},
|
||||
{file = "shapely-2.0.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f32c23d2f43d54029f986479f7c1f6e09c6b3a19353a3833c2ffb226fb63a855"},
|
||||
{file = "shapely-2.0.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b3dc9fb0eb56498912025f5eb352b5126f04801ed0e8bdbd867d21bdbfd7cbd0"},
|
||||
{file = "shapely-2.0.6-cp311-cp311-win32.whl", hash = "sha256:d93b7e0e71c9f095e09454bf18dad5ea716fb6ced5df3cb044564a00723f339d"},
|
||||
{file = "shapely-2.0.6-cp311-cp311-win_amd64.whl", hash = "sha256:c02eb6bf4cfb9fe6568502e85bb2647921ee49171bcd2d4116c7b3109724ef9b"},
|
||||
{file = "shapely-2.0.6-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cec9193519940e9d1b86a3b4f5af9eb6910197d24af02f247afbfb47bcb3fab0"},
|
||||
{file = "shapely-2.0.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:83b94a44ab04a90e88be69e7ddcc6f332da7c0a0ebb1156e1c4f568bbec983c3"},
|
||||
{file = "shapely-2.0.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:537c4b2716d22c92036d00b34aac9d3775e3691f80c7aa517c2c290351f42cd8"},
|
||||
{file = "shapely-2.0.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98fea108334be345c283ce74bf064fa00cfdd718048a8af7343c59eb40f59726"},
|
||||
{file = "shapely-2.0.6-cp312-cp312-win32.whl", hash = "sha256:42fd4cd4834747e4990227e4cbafb02242c0cffe9ce7ef9971f53ac52d80d55f"},
|
||||
{file = "shapely-2.0.6-cp312-cp312-win_amd64.whl", hash = "sha256:665990c84aece05efb68a21b3523a6b2057e84a1afbef426ad287f0796ef8a48"},
|
||||
{file = "shapely-2.0.6-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:42805ef90783ce689a4dde2b6b2f261e2c52609226a0438d882e3ced40bb3013"},
|
||||
{file = "shapely-2.0.6-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:6d2cb146191a47bd0cee8ff5f90b47547b82b6345c0d02dd8b25b88b68af62d7"},
|
||||
{file = "shapely-2.0.6-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e3fdef0a1794a8fe70dc1f514440aa34426cc0ae98d9a1027fb299d45741c381"},
|
||||
{file = "shapely-2.0.6-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c665a0301c645615a107ff7f52adafa2153beab51daf34587170d85e8ba6805"},
|
||||
{file = "shapely-2.0.6-cp313-cp313-win32.whl", hash = "sha256:0334bd51828f68cd54b87d80b3e7cee93f249d82ae55a0faf3ea21c9be7b323a"},
|
||||
{file = "shapely-2.0.6-cp313-cp313-win_amd64.whl", hash = "sha256:d37d070da9e0e0f0a530a621e17c0b8c3c9d04105655132a87cfff8bd77cc4c2"},
|
||||
{file = "shapely-2.0.6-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:fa7468e4f5b92049c0f36d63c3e309f85f2775752e076378e36c6387245c5462"},
|
||||
{file = "shapely-2.0.6-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ed5867e598a9e8ac3291da6cc9baa62ca25706eea186117034e8ec0ea4355653"},
|
||||
{file = "shapely-2.0.6-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:81d9dfe155f371f78c8d895a7b7f323bb241fb148d848a2bf2244f79213123fe"},
|
||||
{file = "shapely-2.0.6-cp37-cp37m-win32.whl", hash = "sha256:fbb7bf02a7542dba55129062570211cfb0defa05386409b3e306c39612e7fbcc"},
|
||||
{file = "shapely-2.0.6-cp37-cp37m-win_amd64.whl", hash = "sha256:837d395fac58aa01aa544495b97940995211e3e25f9aaf87bc3ba5b3a8cd1ac7"},
|
||||
{file = "shapely-2.0.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c6d88ade96bf02f6bfd667ddd3626913098e243e419a0325ebef2bbd481d1eb6"},
|
||||
{file = "shapely-2.0.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8b3b818c4407eaa0b4cb376fd2305e20ff6df757bf1356651589eadc14aab41b"},
|
||||
{file = "shapely-2.0.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1bbc783529a21f2bd50c79cef90761f72d41c45622b3e57acf78d984c50a5d13"},
|
||||
{file = "shapely-2.0.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2423f6c0903ebe5df6d32e0066b3d94029aab18425ad4b07bf98c3972a6e25a1"},
|
||||
{file = "shapely-2.0.6-cp38-cp38-win32.whl", hash = "sha256:2de00c3bfa80d6750832bde1d9487e302a6dd21d90cb2f210515cefdb616e5f5"},
|
||||
{file = "shapely-2.0.6-cp38-cp38-win_amd64.whl", hash = "sha256:3a82d58a1134d5e975f19268710e53bddd9c473743356c90d97ce04b73e101ee"},
|
||||
{file = "shapely-2.0.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:392f66f458a0a2c706254f473290418236e52aa4c9b476a072539d63a2460595"},
|
||||
{file = "shapely-2.0.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:eba5bae271d523c938274c61658ebc34de6c4b33fdf43ef7e938b5776388c1be"},
|
||||
{file = "shapely-2.0.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7060566bc4888b0c8ed14b5d57df8a0ead5c28f9b69fb6bed4476df31c51b0af"},
|
||||
{file = "shapely-2.0.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b02154b3e9d076a29a8513dffcb80f047a5ea63c897c0cd3d3679f29363cf7e5"},
|
||||
{file = "shapely-2.0.6-cp39-cp39-win32.whl", hash = "sha256:44246d30124a4f1a638a7d5419149959532b99dfa25b54393512e6acc9c211ac"},
|
||||
{file = "shapely-2.0.6-cp39-cp39-win_amd64.whl", hash = "sha256:2b542d7f1dbb89192d3512c52b679c822ba916f93479fa5d4fc2fe4fa0b3c9e8"},
|
||||
{file = "shapely-2.0.6.tar.gz", hash = "sha256:997f6159b1484059ec239cacaa53467fd8b5564dabe186cd84ac2944663b0bf6"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
numpy = ">=1.14,<3"
|
||||
|
||||
[package.extras]
|
||||
all = ["numpy", "pytest", "pytest-cov"]
|
||||
docs = ["matplotlib", "numpydoc (==1.1.*)", "sphinx", "sphinx-book-theme", "sphinx-remove-toctrees"]
|
||||
test = ["pytest", "pytest-cov"]
|
||||
vectorized = ["numpy"]
|
||||
|
||||
[[package]]
|
||||
name = "six"
|
||||
|
@ -5052,4 +5098,4 @@ test = ["mypy", "pre-commit", "pytest", "pytest-asyncio", "websockets (>=10.0)"]
[metadata]
lock-version = "2.0"
python-versions = "^3.10"
content-hash = "2de40c45b54e3d9b7f961d15b5256d77805bef274417644e443ea9b4ca381229"
content-hash = "3410b83dd6d5fd1e2d9e86166d39abd07ff2f5050df5786af789c3e56f2feaa6"
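The lock file above swaps the Shapely 1.8.5.post1 wheels for shapely 2.0.6 under the NumPy constraint ">=1.14,<3". A minimal post-install sanity check, offered only as a sketch (it is not part of the repository), could confirm the resolved versions and that basic geometry operations still work:

```python
# Illustrative check only (not project code): confirm Poetry resolved Shapely 2.x
# and a NumPy version compatible with the ">=1.14,<3" constraint in the lock file.
import numpy
import shapely
from shapely.geometry import Point

print("shapely:", shapely.__version__)   # expected 2.0.6 per poetry.lock
print("numpy:", numpy.__version__)       # must satisfy ">=1.14,<3"
print("unit circle area:", Point(0, 0).buffer(1).area)  # roughly pi
```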
@ -20,7 +20,7 @@ CensusData = "^1.13"
certifi = ">= 2024.07.04" # Due to https://data.safetycli.com/v/72083/f17
click = "8.0.4" # pinning for now per https://github.com/psf/black/issues/2964
dynaconf = "^3.1.4"
geopandas = "^0.11.0"
geopandas = "^1.0.1"
ipdb = "^0.13.9"
ipython = "^8.11.0"
jupyter = "^1.0.0"
@ -40,7 +40,7 @@ us = "^2.0.2"
xlsxwriter = "^2.0.0"
pydantic = "^1.9.0"
Rtree = "^1.0.0"
fiona = "~1.8.21"
fiona = "^1.10.1"
tenacity = ">=5.0.2"
pyarrow = "^18.1.0"
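These pyproject.toml hunks bump geopandas from ^0.11.0 to ^1.0.1 and fiona from ~1.8.21 to ^1.10.1, alongside pyarrow ^18.1.0. A quick, hedged way to confirm the upgraded geo stack resolves together after `poetry install` (a sketch, not project code; version expectations simply mirror the constraints above):

```python
# Illustrative check only: print the versions Poetry actually installed for the
# bumped geo dependencies and compare them against the pyproject.toml ranges.
import fiona
import geopandas
import pyarrow

print("geopandas:", geopandas.__version__)  # expected to satisfy ^1.0.1
print("fiona:", fiona.__version__)          # expected to satisfy ^1.10.1
print("pyarrow:", pyarrow.__version__)      # expected to satisfy ^18.1.0
```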
@ -1,3 +1,4 @@
FROM node:latest
FROM node:23-slim
RUN apt-get update -y
RUN npm install -g http-server
CMD http-server ./ --cors
CMD ["http-server", "./", "--cors"]
BIN
data/data-source/fsf_fire.zip
Normal file
Binary file not shown.
BIN
data/data-source/fsf_flood.zip
Normal file
Binary file not shown.
@ -20,14 +20,17 @@ services:
j40_score_server:
image: j40_score_server
container_name: j40_score_server_1
build: data/data-serve/.
build: data/data-serve
volumes:
- ./data/data-pipeline/data_pipeline/data/score:/data/data-pipeline/data_pipeline/data/score
- ./data/data-pipeline/data_pipeline/data/tribal:/data/data-pipeline/data_pipeline/data/tribal
ports:
- 5000:8080
- 5080:8080
environment:
TZ: America/Los_Angeles
depends_on:
score:
condition: service_completed_successfully

#The j40_website service runs the web app / map / site
j40_website:
@ -44,4 +47,5 @@ services:
ports:
- 8000:6000
depends_on:
- "j40_score_server"
j40_score_server:
condition: service_started
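With the score server's host port mapping changed from 5000:8080 to 5080:8080 and the website now gated on `condition: service_started`, a hypothetical local smoke check might poll the new port once `docker compose up` is running. The URL and port below are assumptions taken from the mapping above; this is a sketch, not part of the repository:

```python
# Hypothetical local smoke check: after the compose stack is up, the
# j40_score_server (http-server on container port 8080) should answer on
# the host port 5080 declared in docker-compose.yml.
import urllib.request

with urllib.request.urlopen("http://localhost:5080/", timeout=10) as resp:
    print("score server responded with HTTP", resp.status)
```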