Score F, testing methodology (#510)

* fixing dependency issue

* fixing more dependencies

* including fraction of state AMI

* wip

* nitpick whitespace

* etl working now

* wip on scoring

* fix rename error

* reducing metrics

* fixing score f

* fixing readme

* adding dependency

* passing tests

* linting/black

* removing unnecessary sample

* fixing error

* adding verify flag on etl/base

Co-authored-by: Jorge Escobar <jorge.e.escobar@omb.eop.gov>
Lucas Merrill Brown 2021-08-24 15:40:54 -05:00 committed by GitHub
parent 043ed983ea
commit 65ceb7900f
23 changed files with 557 additions and 153 deletions

@ -1,10 +1,16 @@
{ {
"python.formatting.provider": "black", "python.formatting.provider": "black",
"python.formatting.blackArgs": [
"--line-length=80"
],
"python.linting.enabled": true, "python.linting.enabled": true,
"python.linting.flake8Enabled": true, "python.linting.flake8Enabled": true,
"python.linting.pylintEnabled": true, "python.linting.pylintEnabled": true,
"python.testing.pytestEnabled": true, "python.testing.pytestEnabled": true,
"python.testing.pytestArgs": ["-s", "."], "python.testing.pytestArgs": [
"-s",
"."
],
"python.testing.unittestEnabled": false, "python.testing.unittestEnabled": false,
"python.testing.nosetestsEnabled": false "python.testing.nosetestsEnabled": false
} }

@ -4,7 +4,7 @@
"version": "2.0.0", "version": "2.0.0",
"tasks": [ "tasks": [
{ {
"label": "test with tox", "label": "Test with tox",
"type": "shell", "type": "shell",
"command": "tox", "command": "tox",
"group": { "group": {
@ -16,19 +16,25 @@
"label": "Run Black Formatter", "label": "Run Black Formatter",
"type": "shell", "type": "shell",
"command": "black", "command": "black",
"args": ["data_pipeline"] "args": [
"data_pipeline"
]
}, },
{ {
"label": "Run Flake8 Style Enforcer", "label": "Run Flake8 Style Enforcer",
"type": "shell", "type": "shell",
"command": "black", "command": "black",
"args": ["data_pipeline"] "args": [
"data_pipeline"
]
}, },
{ {
"label": "Run Pylint", "label": "Run Pylint",
"type": "shell", "type": "shell",
"command": "pylint", "command": "pylint",
"args": ["data_pipeline"] "args": [
"data_pipeline"
]
} }
] ]
} }

@ -120,17 +120,6 @@ To run this comparison tool:
1. Make sure you've gone through the above steps to run the data ETL and score generation. 1. Make sure you've gone through the above steps to run the data ETL and score generation.
1. From the package directory (`data/data-pipeline/data_pipeline/`), navigate to the `ipython` directory: `cd ipython`. 1. From the package directory (`data/data-pipeline/data_pipeline/`), navigate to the `ipython` directory: `cd ipython`.
1. Ensure you have `pandoc` installed on your computer. If you're on a Mac, run `brew install pandoc`; for other OSes, see pandoc's [installation guide](https://pandoc.org/installing.html). 1. Ensure you have `pandoc` installed on your computer. If you're on a Mac, run `brew install pandoc`; for other OSes, see pandoc's [installation guide](https://pandoc.org/installing.html).
1. Install the extra dependencies:
```python
pip install pypandoc
pip install requests
pip install us
pip install tqdm
pip install dynaconf
pip instal xlsxwriter
```
1. Start the notebooks: `jupyter notebook` 1. Start the notebooks: `jupyter notebook`
1. In your browser, navigate to one of the URLs returned by the above command. 1. In your browser, navigate to one of the URLs returned by the above command.
1. Select `scoring_comparison.ipynb` from the options in your browser. 1. Select `scoring_comparison.ipynb` from the options in your browser.

@ -1,4 +1,5 @@
from pathlib import Path from pathlib import Path
from typing import Optional
from data_pipeline.config import settings from data_pipeline.config import settings
from data_pipeline.utils import unzip_file_from_url, remove_all_from_dir from data_pipeline.utils import unzip_file_from_url, remove_all_from_dir
@ -33,14 +34,21 @@ class ExtractTransformLoad:
pass pass
def extract(self, source_url: str = None, extract_path: Path = None) -> None: def extract(
self,
source_url: str = None,
extract_path: Path = None,
verify: Optional[bool] = True,
) -> None:
"""Extract the data from """Extract the data from
a remote source. By default it provides code to get the file from a source url, a remote source. By default it provides code to get the file from a source url,
unzips it and stores it on an extract_path.""" unzips it and stores it on an extract_path."""
# this can be accessed via super().extract() # this can be accessed via super().extract()
if source_url and extract_path: if source_url and extract_path:
unzip_file_from_url(source_url, self.TMP_PATH, extract_path) unzip_file_from_url(
source_url, self.TMP_PATH, extract_path, verify=verify
)
def transform(self) -> None: def transform(self) -> None:
"""Transform the data extracted into a format that can be consumed by the """Transform the data extracted into a format that can be consumed by the

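Reviewer note: a minimal sketch of how a concrete ETL subclass can make use of the new `verify` flag on `extract()`. The class name, URL, and target path below are hypothetical placeholders, not part of this commit.

```python
# Hypothetical subclass, for illustration only: shows the new verify flag being
# forwarded from extract() to unzip_file_from_url via super().extract().
from data_pipeline.etl.base import ExtractTransformLoad


class ExampleETL(ExtractTransformLoad):
    def __init__(self):
        self.EXAMPLE_ZIP_URL = "https://example.com/data.zip"  # placeholder URL

    def extract(self) -> None:
        # verify=False skips TLS certificate validation, which is what the
        # EJScreen ETL below needs for its flaky endpoint; the default stays True.
        super().extract(
            source_url=self.EXAMPLE_ZIP_URL,
            extract_path=self.TMP_PATH / "example",
            verify=False,
        )
```
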
@ -34,6 +34,11 @@ DATASET_LIST = [
"module_dir": "hud_recap", "module_dir": "hud_recap",
"class_name": "HudRecapETL", "class_name": "HudRecapETL",
}, },
{
"name": "cdc_places",
"module_dir": "cdc_places",
"class_name": "CDCPlacesETL",
},
] ]
CENSUS_INFO = { CENSUS_INFO = {
"name": "census", "name": "census",

@ -50,6 +50,7 @@ class ScoreETL(ExtractTransformLoad):
self.census_df: pd.DataFrame self.census_df: pd.DataFrame
self.housing_and_transportation_df: pd.DataFrame self.housing_and_transportation_df: pd.DataFrame
self.hud_housing_df: pd.DataFrame self.hud_housing_df: pd.DataFrame
self.cdc_places_df: pd.DataFrame
def data_sets(self) -> list: def data_sets(self) -> list:
# Define a named tuple that will be used for each data set input. # Define a named tuple that will be used for each data set input.
@ -81,6 +82,36 @@ class ScoreETL(ExtractTransformLoad):
renamed_field=self.MEDIAN_INCOME_AS_PERCENT_OF_STATE_FIELD_NAME, renamed_field=self.MEDIAN_INCOME_AS_PERCENT_OF_STATE_FIELD_NAME,
bucket=None, bucket=None,
), ),
DataSet(
input_field="Current asthma among adults aged >=18 years",
renamed_field="Current asthma among adults aged >=18 years",
bucket=None,
),
DataSet(
input_field="Coronary heart disease among adults aged >=18 years",
renamed_field="Coronary heart disease among adults aged >=18 years",
bucket=None,
),
DataSet(
input_field="Cancer (excluding skin cancer) among adults aged >=18 years",
renamed_field="Cancer (excluding skin cancer) among adults aged >=18 years",
bucket=None,
),
DataSet(
input_field="Current lack of health insurance among adults aged 18-64 years",
renamed_field="Current lack of health insurance among adults aged 18-64 years",
bucket=None,
),
DataSet(
input_field="Diagnosed diabetes among adults aged >=18 years",
renamed_field="Diagnosed diabetes among adults aged >=18 years",
bucket=None,
),
DataSet(
input_field="Physical health not good for >=14 days among adults aged >=18 years",
renamed_field="Physical health not good for >=14 days among adults aged >=18 years",
bucket=None,
),
# The following data sets have buckets, because they're used in Score C # The following data sets have buckets, because they're used in Score C
DataSet( DataSet(
input_field="CANCER", input_field="CANCER",
@ -218,6 +249,14 @@ class ScoreETL(ExtractTransformLoad):
low_memory=False, low_memory=False,
) )
# Load CDC Places data
cdc_places_csv = self.DATA_PATH / "dataset" / "cdc_places" / "usa.csv"
self.cdc_places_df = pd.read_csv(
cdc_places_csv,
dtype={self.GEOID_TRACT_FIELD_NAME: "string"},
low_memory=False,
)
def transform(self) -> None: def transform(self) -> None:
## IMPORTANT: THIS METHOD IS CLOSE TO THE LIMIT OF STATEMENTS ## IMPORTANT: THIS METHOD IS CLOSE TO THE LIMIT OF STATEMENTS
@ -247,8 +286,28 @@ class ScoreETL(ExtractTransformLoad):
) )
# Join all the data sources that use census tracts # Join all the data sources that use census tracts
# TODO: when there's more than one data source using census tract, reduce/merge them here. census_tract_dfs = [
census_tract_df = self.hud_housing_df self.hud_housing_df,
self.cdc_places_df,
]
census_tract_df = functools.reduce(
lambda left, right: pd.merge(
left=left,
right=right,
on=self.GEOID_TRACT_FIELD_NAME,
how="outer",
),
census_tract_dfs,
)
# Sanity check the join.
if (
len(census_tract_df[self.GEOID_TRACT_FIELD_NAME].str.len().unique())
!= 1
):
raise ValueError(
f"One of the input CSVs uses {self.GEOID_TRACT_FIELD_NAME} with a different length."
)
# Calculate the tract for the CBG data. # Calculate the tract for the CBG data.
census_block_group_df[ census_block_group_df[
@ -437,12 +496,56 @@ class ScoreETL(ExtractTransformLoad):
) )
self.df[meets_burden_field_name] = ( self.df[meets_burden_field_name] = (
self.df["Particulate matter (PM2.5)"] > 10 (self.df["Particulate matter (PM2.5) (percentile)"] > 0.9)
) | (self.df["Respiratory hazard " "index"] > 0.75) | (self.df["Respiratory hazard index (percentile)"] > 0.9)
| (self.df["Traffic proximity and volume (percentile)"] > 0.9)
| (
self.df[
"Percent pre-1960s housing (lead paint indicator) (percentile)"
]
> 0.9
)
| (self.df["Proximity to RMP sites (percentile)"] > 0.9)
| (
self.df[
"Current asthma among adults aged >=18 years (percentile)"
]
> 0.9
)
| (
self.df[
"Coronary heart disease among adults aged >=18 years (percentile)"
]
> 0.9
)
| (
self.df[
"Cancer (excluding skin cancer) among adults aged >=18 years (percentile)"
]
> 0.9
)
# | (
# self.df[
# "Current lack of health insurance among adults aged 18-64 years (percentile)"
# ]
# > 0.9
# )
| (
self.df[
"Diagnosed diabetes among adults aged >=18 years (percentile)"
]
> 0.9
)
# | (
# self.df[
# "Physical health not good for >=14 days among adults aged >=18 years (percentile)"
# ]
# > 0.9
# )
)
self.df["Score F (communities)"] = ( self.df["Score F (communities)"] = (
self.df[ami_and_high_school_field_name] self.df[meets_socio_field_name] & self.df[meets_burden_field_name]
& self.df[meets_burden_field_name]
) )
def load(self) -> None: def load(self) -> None:
@ -450,10 +553,4 @@ class ScoreETL(ExtractTransformLoad):
# write nationwide csv # write nationwide csv
self.SCORE_CSV_PATH.mkdir(parents=True, exist_ok=True) self.SCORE_CSV_PATH.mkdir(parents=True, exist_ok=True)
# TODO: drop
self.df[0:10000].to_csv(
self.SCORE_CSV_PATH / "usa-10000.csv", index=False
)
self.df.to_csv(self.SCORE_CSV_PATH / "usa.csv", index=False) self.df.to_csv(self.SCORE_CSV_PATH / "usa.csv", index=False)

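For context on the new tract-level join above: a self-contained sketch of the `functools.reduce` outer-merge plus the tract-ID-length sanity check. The two rows are made up; the real frames come from the HUD housing and CDC Places CSVs.

```python
# Toy illustration of the reduce/outer-merge pattern and the sanity check;
# the rows below are fabricated for demonstration.
import functools

import pandas as pd

GEOID_TRACT_FIELD_NAME = "GEOID10_TRACT"

hud_housing_df = pd.DataFrame(
    {GEOID_TRACT_FIELD_NAME: ["01001020100"], "Housing burden (percent)": [0.31]}
)
cdc_places_df = pd.DataFrame(
    {
        GEOID_TRACT_FIELD_NAME: ["01001020100"],
        "Diagnosed diabetes among adults aged >=18 years": [11.2],
    }
)

census_tract_dfs = [hud_housing_df, cdc_places_df]

# Outer-join every tract-level frame on the shared tract ID.
census_tract_df = functools.reduce(
    lambda left, right: pd.merge(
        left=left, right=right, on=GEOID_TRACT_FIELD_NAME, how="outer"
    ),
    census_tract_dfs,
)

# If any input formatted its tract IDs differently (e.g. a different length),
# the join would silently fail to match rows, so the ID lengths must agree.
if len(census_tract_df[GEOID_TRACT_FIELD_NAME].str.len().unique()) != 1:
    raise ValueError("One of the inputs uses a tract ID with a different length.")
```
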
@ -10,14 +10,19 @@ logger = get_module_logger(__name__)
class CalEnviroScreenETL(ExtractTransformLoad): class CalEnviroScreenETL(ExtractTransformLoad):
def __init__(self): def __init__(self):
self.CALENVIROSCREEN_FTP_URL = ( self.CALENVIROSCREEN_FTP_URL = (
settings.AWS_JUSTICE40_DATASOURCES_URL + "/CalEnviroScreen_4.0_2021.zip" settings.AWS_JUSTICE40_DATASOURCES_URL
+ "/CalEnviroScreen_4.0_2021.zip"
)
self.CALENVIROSCREEN_CSV = (
self.TMP_PATH / "CalEnviroScreen_4.0_2021.csv"
) )
self.CALENVIROSCREEN_CSV = self.TMP_PATH / "CalEnviroScreen_4.0_2021.csv"
self.CSV_PATH = self.DATA_PATH / "dataset" / "calenviroscreen4" self.CSV_PATH = self.DATA_PATH / "dataset" / "calenviroscreen4"
# Definining some variable names # Definining some variable names
self.CALENVIROSCREEN_SCORE_FIELD_NAME = "calenviroscreen_score" self.CALENVIROSCREEN_SCORE_FIELD_NAME = "calenviroscreen_score"
self.CALENVIROSCREEN_PERCENTILE_FIELD_NAME = "calenviroscreen_percentile" self.CALENVIROSCREEN_PERCENTILE_FIELD_NAME = (
"calenviroscreen_percentile"
)
self.CALENVIROSCREEN_PRIORITY_COMMUNITY_FIELD_NAME = ( self.CALENVIROSCREEN_PRIORITY_COMMUNITY_FIELD_NAME = (
"calenviroscreen_priority_community" "calenviroscreen_priority_community"
) )

@ -0,0 +1,66 @@
import pandas as pd
from data_pipeline.etl.base import ExtractTransformLoad
from data_pipeline.utils import get_module_logger, download_file_from_url
logger = get_module_logger(__name__)
class CDCPlacesETL(ExtractTransformLoad):
def __init__(self):
self.OUTPUT_PATH = self.DATA_PATH / "dataset" / "cdc_places"
self.CDC_PLACES_URL = "https://chronicdata.cdc.gov/api/views/cwsq-ngmh/rows.csv?accessType=DOWNLOAD"
self.CDC_GEOID_FIELD_NAME = "LocationID"
self.CDC_VALUE_FIELD_NAME = "Data_Value"
self.CDC_MEASURE_FIELD_NAME = "Measure"
self.df: pd.DataFrame
def extract(self) -> None:
logger.info("Starting to download 520MB CDC Places file.")
file_path = download_file_from_url(
file_url=self.CDC_PLACES_URL,
download_file_name=self.TMP_PATH
/ "cdc_places"
/ "census_tract.csv",
)
self.df = pd.read_csv(
filepath_or_buffer=file_path,
dtype={self.CDC_GEOID_FIELD_NAME: "string"},
low_memory=False,
)
def transform(self) -> None:
logger.info("Starting CDC Places transform")
# Rename GEOID field
self.df.rename(
columns={self.CDC_GEOID_FIELD_NAME: self.GEOID_TRACT_FIELD_NAME},
inplace=True,
errors="raise",
)
# Note: Puerto Rico not included.
self.df = self.df.pivot(
index=self.GEOID_TRACT_FIELD_NAME,
columns=self.CDC_MEASURE_FIELD_NAME,
values=self.CDC_VALUE_FIELD_NAME,
)
# Make the index (the census tract ID) a column, not the index.
self.df.reset_index(inplace=True)
def load(self) -> None:
logger.info("Saving CDC Places Data")
# mkdir census
self.OUTPUT_PATH.mkdir(parents=True, exist_ok=True)
self.df.to_csv(path_or_buf=self.OUTPUT_PATH / "usa.csv", index=False)
def validate(self) -> None:
logger.info("Validating Census ACS Data")
pass

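A quick, self-contained illustration of the long-to-wide pivot that `CDCPlacesETL.transform` performs: the raw CDC Places file has one row per tract x measure, and the transform reshapes it into one row per tract with one column per measure. The two rows and values below are invented.

```python
# Rename the CDC LocationID to the shared tract ID, then pivot measures to columns.
import pandas as pd

raw = pd.DataFrame(
    {
        "LocationID": ["01001020100", "01001020100"],
        "Measure": [
            "Current asthma among adults aged >=18 years",
            "Diagnosed diabetes among adults aged >=18 years",
        ],
        "Data_Value": [9.9, 11.2],
    }
)

raw = raw.rename(columns={"LocationID": "GEOID10_TRACT"})
wide = raw.pivot(index="GEOID10_TRACT", columns="Measure", values="Data_Value")
wide = wide.reset_index()  # make the tract ID a regular column again
print(wide)
```
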
@ -33,7 +33,9 @@ class CensusETL(ExtractTransformLoad):
self.NATIONAL_CBG_CSV_PATH = self.CSV_BASE_PATH / "us.csv" self.NATIONAL_CBG_CSV_PATH = self.CSV_BASE_PATH / "us.csv"
self.NATIONAL_CBG_JSON_PATH = self.GEOJSON_BASE_PATH / "us.json" self.NATIONAL_CBG_JSON_PATH = self.GEOJSON_BASE_PATH / "us.json"
def _path_for_fips_file(self, fips_code: str, file_type: GeoFileType) -> Path: def _path_for_fips_file(
self, fips_code: str, file_type: GeoFileType
) -> Path:
"""Get paths for associated geospatial files for the provided FIPS code """Get paths for associated geospatial files for the provided FIPS code
Args: Args:
@ -93,7 +95,9 @@ class CensusETL(ExtractTransformLoad):
None None
""" """
shp_file_path = self._path_for_fips_file(fips_code, GeoFileType.SHP) shp_file_path = self._path_for_fips_file(fips_code, GeoFileType.SHP)
geojson_file_path = self._path_for_fips_file(fips_code, GeoFileType.GEOJSON) geojson_file_path = self._path_for_fips_file(
fips_code, GeoFileType.GEOJSON
)
logger.info(f"Checking if {fips_code} geoJSON file exists ") logger.info(f"Checking if {fips_code} geoJSON file exists ")
if not geojson_file_path.is_file(): if not geojson_file_path.is_file():
logger.info( logger.info(
@ -176,7 +180,9 @@ class CensusETL(ExtractTransformLoad):
if not self.NATIONAL_CBG_CSV_PATH.is_file(): if not self.NATIONAL_CBG_CSV_PATH.is_file():
logger.info(f"Creating {self.NATIONAL_CBG_CSV_PATH}") logger.info(f"Creating {self.NATIONAL_CBG_CSV_PATH}")
with open(self.NATIONAL_CBG_CSV_PATH, mode="w", newline="") as cbg_csv_file: with open(
self.NATIONAL_CBG_CSV_PATH, mode="w", newline=""
) as cbg_csv_file:
cbg_csv_file_writer = csv.writer( cbg_csv_file_writer = csv.writer(
cbg_csv_file, cbg_csv_file,
delimiter=",", delimiter=",",
@ -205,7 +211,9 @@ class CensusETL(ExtractTransformLoad):
state_gdf = gpd.read_file(file_name) state_gdf = gpd.read_file(file_name)
usa_df = usa_df.append(state_gdf) usa_df = usa_df.append(state_gdf)
usa_df = usa_df.to_crs("+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs") usa_df = usa_df.to_crs(
"+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs"
)
logger.info("Writing national geojson file") logger.info("Writing national geojson file")
usa_df.to_file(self.NATIONAL_CBG_JSON_PATH, driver="GeoJSON") usa_df.to_file(self.NATIONAL_CBG_JSON_PATH, driver="GeoJSON")

@ -41,10 +41,10 @@ class CensusACSETL(ExtractTransformLoad):
self.STATE_MEDIAN_INCOME_FTP_URL = ( self.STATE_MEDIAN_INCOME_FTP_URL = (
settings.AWS_JUSTICE40_DATASOURCES_URL settings.AWS_JUSTICE40_DATASOURCES_URL
+ "/2014_to_2019_state_median_income.zip" + "/2015_to_2019_state_median_income.zip"
) )
self.STATE_MEDIAN_INCOME_FILE_PATH = ( self.STATE_MEDIAN_INCOME_FILE_PATH = (
self.TMP_PATH / "2014_to_2019_state_median_income.csv" self.TMP_PATH / "2015_to_2019_state_median_income.csv"
) )
def _fips_from_censusdata_censusgeo( def _fips_from_censusdata_censusgeo(

@ -8,9 +8,7 @@ logger = get_module_logger(__name__)
class EJScreenETL(ExtractTransformLoad): class EJScreenETL(ExtractTransformLoad):
def __init__(self): def __init__(self):
self.EJSCREEN_FTP_URL = ( self.EJSCREEN_FTP_URL = "https://gaftp.epa.gov/EJSCREEN/2019/EJSCREEN_2019_StatePctile.csv.zip"
"https://gaftp.epa.gov/EJSCREEN/2019/EJSCREEN_2019_StatePctile.csv.zip"
)
self.EJSCREEN_CSV = self.TMP_PATH / "EJSCREEN_2019_StatePctiles.csv" self.EJSCREEN_CSV = self.TMP_PATH / "EJSCREEN_2019_StatePctiles.csv"
self.CSV_PATH = self.DATA_PATH / "dataset" / "ejscreen_2019" self.CSV_PATH = self.DATA_PATH / "dataset" / "ejscreen_2019"
self.df: pd.DataFrame self.df: pd.DataFrame
@ -20,6 +18,7 @@ class EJScreenETL(ExtractTransformLoad):
super().extract( super().extract(
self.EJSCREEN_FTP_URL, self.EJSCREEN_FTP_URL,
self.TMP_PATH, self.TMP_PATH,
verify=False, # EPA EJScreen end point has certificate issues often
) )
def transform(self) -> None: def transform(self) -> None:

@ -35,7 +35,9 @@ class HousingTransportationETL(ExtractTransformLoad):
) )
# New file name: # New file name:
tmp_csv_file_path = zip_file_dir / f"htaindex_data_blkgrps_{fips}.csv" tmp_csv_file_path = (
zip_file_dir / f"htaindex_data_blkgrps_{fips}.csv"
)
tmp_df = pd.read_csv(filepath_or_buffer=tmp_csv_file_path) tmp_df = pd.read_csv(filepath_or_buffer=tmp_csv_file_path)
dfs.append(tmp_df) dfs.append(tmp_df)
@ -47,9 +49,9 @@ class HousingTransportationETL(ExtractTransformLoad):
# Rename and reformat block group ID # Rename and reformat block group ID
self.df.rename(columns={"blkgrp": self.GEOID_FIELD_NAME}, inplace=True) self.df.rename(columns={"blkgrp": self.GEOID_FIELD_NAME}, inplace=True)
self.df[self.GEOID_FIELD_NAME] = self.df[self.GEOID_FIELD_NAME].str.replace( self.df[self.GEOID_FIELD_NAME] = self.df[
'"', "" self.GEOID_FIELD_NAME
) ].str.replace('"', "")
def load(self) -> None: def load(self) -> None:
logger.info("Saving Housing and Transportation Data") logger.info("Saving Housing and Transportation Data")

@ -9,16 +9,16 @@ class HudHousingETL(ExtractTransformLoad):
def __init__(self): def __init__(self):
self.OUTPUT_PATH = self.DATA_PATH / "dataset" / "hud_housing" self.OUTPUT_PATH = self.DATA_PATH / "dataset" / "hud_housing"
self.GEOID_TRACT_FIELD_NAME = "GEOID10_TRACT" self.GEOID_TRACT_FIELD_NAME = "GEOID10_TRACT"
self.HOUSING_FTP_URL = ( self.HOUSING_FTP_URL = "https://www.huduser.gov/portal/datasets/cp/2012thru2016-140-csv.zip"
"https://www.huduser.gov/portal/datasets/cp/2012thru2016-140-csv.zip"
)
self.HOUSING_ZIP_FILE_DIR = self.TMP_PATH / "hud_housing" self.HOUSING_ZIP_FILE_DIR = self.TMP_PATH / "hud_housing"
# We measure households earning less than 80% of HUD Area Median Family Income by county # We measure households earning less than 80% of HUD Area Median Family Income by county
# and paying greater than 30% of their income to housing costs. # and paying greater than 30% of their income to housing costs.
self.HOUSING_BURDEN_FIELD_NAME = "Housing burden (percent)" self.HOUSING_BURDEN_FIELD_NAME = "Housing burden (percent)"
self.HOUSING_BURDEN_NUMERATOR_FIELD_NAME = "HOUSING_BURDEN_NUMERATOR" self.HOUSING_BURDEN_NUMERATOR_FIELD_NAME = "HOUSING_BURDEN_NUMERATOR"
self.HOUSING_BURDEN_DENOMINATOR_FIELD_NAME = "HOUSING_BURDEN_DENOMINATOR" self.HOUSING_BURDEN_DENOMINATOR_FIELD_NAME = (
"HOUSING_BURDEN_DENOMINATOR"
)
# Note: some variable definitions. # Note: some variable definitions.
# HUD-adjusted median family income (HAMFI). # HUD-adjusted median family income (HAMFI).
@ -55,7 +55,9 @@ class HudHousingETL(ExtractTransformLoad):
) )
# Rename and reformat block group ID # Rename and reformat block group ID
self.df.rename(columns={"geoid": self.GEOID_TRACT_FIELD_NAME}, inplace=True) self.df.rename(
columns={"geoid": self.GEOID_TRACT_FIELD_NAME}, inplace=True
)
# The CHAS data has census tract ids such as `14000US01001020100` # The CHAS data has census tract ids such as `14000US01001020100`
# Whereas the rest of our data uses, for the same tract, `01001020100`. # Whereas the rest of our data uses, for the same tract, `01001020100`.
@ -273,7 +275,9 @@ class HudHousingETL(ExtractTransformLoad):
# TODO: add small sample size checks # TODO: add small sample size checks
self.df[self.HOUSING_BURDEN_FIELD_NAME] = self.df[ self.df[self.HOUSING_BURDEN_FIELD_NAME] = self.df[
self.HOUSING_BURDEN_NUMERATOR_FIELD_NAME self.HOUSING_BURDEN_NUMERATOR_FIELD_NAME
].astype(float) / self.df[self.HOUSING_BURDEN_DENOMINATOR_FIELD_NAME].astype( ].astype(float) / self.df[
self.HOUSING_BURDEN_DENOMINATOR_FIELD_NAME
].astype(
float float
) )

@ -18,7 +18,9 @@ class HudRecapETL(ExtractTransformLoad):
self.CSV_PATH = self.DATA_PATH / "dataset" / "hud_recap" self.CSV_PATH = self.DATA_PATH / "dataset" / "hud_recap"
# Definining some variable names # Definining some variable names
self.HUD_RECAP_PRIORITY_COMMUNITY_FIELD_NAME = "hud_recap_priority_community" self.HUD_RECAP_PRIORITY_COMMUNITY_FIELD_NAME = (
"hud_recap_priority_community"
)
self.df: pd.DataFrame self.df: pd.DataFrame

@ -8,9 +8,7 @@ logger = get_module_logger(__name__)
class TreeEquityScoreETL(ExtractTransformLoad): class TreeEquityScoreETL(ExtractTransformLoad):
def __init__(self): def __init__(self):
self.TES_URL = ( self.TES_URL = "https://national-tes-data-share.s3.amazonaws.com/national_tes_share/"
"https://national-tes-data-share.s3.amazonaws.com/national_tes_share/"
)
self.TES_CSV = self.TMP_PATH / "tes_2021_data.csv" self.TES_CSV = self.TMP_PATH / "tes_2021_data.csv"
self.CSV_PATH = self.DATA_PATH / "dataset" / "tree_equity_score" self.CSV_PATH = self.DATA_PATH / "dataset" / "tree_equity_score"
self.df: gpd.GeoDataFrame self.df: gpd.GeoDataFrame
@ -78,8 +76,12 @@ class TreeEquityScoreETL(ExtractTransformLoad):
logger.info("Transforming Tree Equity Score Data") logger.info("Transforming Tree Equity Score Data")
tes_state_dfs = [] tes_state_dfs = []
for state in self.states: for state in self.states:
tes_state_dfs.append(gpd.read_file(f"{self.TMP_PATH}/{state}/{state}.shp")) tes_state_dfs.append(
self.df = gpd.GeoDataFrame(pd.concat(tes_state_dfs), crs=tes_state_dfs[0].crs) gpd.read_file(f"{self.TMP_PATH}/{state}/{state}.shp")
)
self.df = gpd.GeoDataFrame(
pd.concat(tes_state_dfs), crs=tes_state_dfs[0].crs
)
def load(self) -> None: def load(self) -> None:
logger.info("Saving Tree Equity Score GeoJSON") logger.info("Saving Tree Equity Score GeoJSON")

@ -3,9 +3,6 @@
{ {
"cell_type": "code", "cell_type": "code",
"execution_count": null, "execution_count": null,
"id": "0491828b",
"metadata": {},
"outputs": [],
"source": [ "source": [
"import pandas as pd\n", "import pandas as pd\n",
"import censusdata\n", "import censusdata\n",
@ -32,30 +29,26 @@
"# Some display settings to make pandas outputs more readable.\n", "# Some display settings to make pandas outputs more readable.\n",
"pd.set_option(\"display.expand_frame_repr\", False)\n", "pd.set_option(\"display.expand_frame_repr\", False)\n",
"pd.set_option(\"display.precision\", 2)" "pd.set_option(\"display.precision\", 2)"
] ],
"outputs": [],
"metadata": {}
}, },
{ {
"cell_type": "code", "cell_type": "code",
"execution_count": null, "execution_count": null,
"id": "654f25a1",
"metadata": {
"scrolled": true
},
"outputs": [],
"source": [ "source": [
"# Following the tutorial at https://jtleider.github.io/censusdata/example1.html.\n", "# Following the tutorial at https://jtleider.github.io/censusdata/example1.html.\n",
"# Full list of fields is at https://www2.census.gov/programs-surveys/acs/summary_file/2019/documentation/user_tools/ACS2019_Table_Shells.xlsx\n", "# Full list of fields is at https://www2.census.gov/programs-surveys/acs/summary_file/2019/documentation/user_tools/ACS2019_Table_Shells.xlsx\n",
"censusdata.printtable(censusdata.censustable(src=\"acs5\", year=ACS_YEAR, table=\"B19013\"))" "censusdata.printtable(censusdata.censustable(src=\"acs5\", year=ACS_YEAR, table=\"B19013\"))"
] ],
"outputs": [],
"metadata": {
"scrolled": true
}
}, },
{ {
"cell_type": "code", "cell_type": "code",
"execution_count": null, "execution_count": null,
"id": "8999cea4",
"metadata": {
"scrolled": true
},
"outputs": [],
"source": [ "source": [
"def fips_from_censusdata_censusgeo(censusgeo: censusdata.censusgeo) -> str:\n", "def fips_from_censusdata_censusgeo(censusgeo: censusdata.censusgeo) -> str:\n",
" \"\"\"Create a FIPS code from the proprietary censusgeo index.\"\"\"\n", " \"\"\"Create a FIPS code from the proprietary censusgeo index.\"\"\"\n",
@ -85,31 +78,33 @@
"df[GEOID_FIELD_NAME] = df.index.to_series().apply(func=fips_from_censusdata_censusgeo)\n", "df[GEOID_FIELD_NAME] = df.index.to_series().apply(func=fips_from_censusdata_censusgeo)\n",
"\n", "\n",
"df.head()" "df.head()"
] ],
"outputs": [],
"metadata": {
"scrolled": true
}
}, },
{ {
"cell_type": "code", "cell_type": "code",
"execution_count": null, "execution_count": null,
"id": "2a269bb1",
"metadata": {
"scrolled": true
},
"outputs": [],
"source": [ "source": [
"columns_to_include = [\"GEOID2\", \"Median household income (State)\"]\n", "columns_to_include = [\"GEOID2\", \"Median household income (State)\"]\n",
"\n", "\n",
"df.rename(columns={\"GEOID10\": \"GEOID2\", \"B19013_001E\": \"Median household income (State)\"}, inplace=True)\n", "df.rename(columns={\"GEOID10\": \"GEOID2\", \"B19013_001E\": \"Median household income (State)\"}, inplace=True)\n",
"\n", "\n",
"df[columns_to_include].to_csv(path_or_buf= \"/Users/lucas/Documents/usds/repos/justice40-tool/data/data-pipeline/data_pipeline/data/needs_to_be_moved_to_s3/2014_to_2019_state_median_income.csv\", index=False)" "# df[columns_to_include].to_csv(path_or_buf= \"/Users/lucas/Documents/usds/repos/justice40-tool/data/data-pipeline/data_pipeline/data/needs_to_be_moved_to_s3/2014_to_2019_state_median_income.csv\", index=False)"
] ],
"outputs": [],
"metadata": {
"scrolled": true
}
}, },
{ {
"cell_type": "code", "cell_type": "code",
"execution_count": null, "execution_count": null,
"id": "91932af5", "source": [],
"metadata": {},
"outputs": [], "outputs": [],
"source": [] "metadata": {}
} }
], ],
"metadata": { "metadata": {
@ -133,4 +128,4 @@
}, },
"nbformat": 4, "nbformat": 4,
"nbformat_minor": 5 "nbformat_minor": 5
} }

@ -28,7 +28,7 @@
"from datetime import datetime\n", "from datetime import datetime\n",
"from tqdm.notebook import tqdm_notebook\n", "from tqdm.notebook import tqdm_notebook\n",
"\n", "\n",
"module_path = os.path.abspath(os.path.join(\"..\"))\n", "module_path = os.path.abspath(os.path.join(\"../..\"))\n",
"if module_path not in sys.path:\n", "if module_path not in sys.path:\n",
" sys.path.append(module_path)\n", " sys.path.append(module_path)\n",
"\n", "\n",
@ -215,7 +215,7 @@
{ {
"cell_type": "code", "cell_type": "code",
"execution_count": null, "execution_count": null,
"id": "8b795fb4", "id": "274f6bc6",
"metadata": {}, "metadata": {},
"outputs": [], "outputs": [],
"source": [ "source": [
@ -234,6 +234,21 @@
"# (`census_tract_indices`).\n", "# (`census_tract_indices`).\n",
"census_block_group_indices = [\n", "census_block_group_indices = [\n",
" Index(\n", " Index(\n",
" method_name=\"Score F\",\n",
" priority_communities_field=\"Score F (communities)\",\n",
" other_census_tract_fields_to_keep=[],\n",
" ),\n",
" Index(\n",
" method_name=\"Score F (socioeconomic only)\",\n",
" priority_communities_field=\"Meets socioeconomic criteria\",\n",
" other_census_tract_fields_to_keep=[],\n",
" ),\n",
" Index(\n",
" method_name=\"Score F (burden only)\",\n",
" priority_communities_field=\"Meets burden criteria\",\n",
" other_census_tract_fields_to_keep=[],\n",
" ),\n",
" Index(\n",
" method_name=\"Score A\",\n", " method_name=\"Score A\",\n",
" priority_communities_field=\"Score A (top 25th percentile)\",\n", " priority_communities_field=\"Score A (top 25th percentile)\",\n",
" other_census_tract_fields_to_keep=[],\n", " other_census_tract_fields_to_keep=[],\n",
@ -253,21 +268,21 @@
" priority_communities_field=\"Score D (top 25th percentile)\",\n", " priority_communities_field=\"Score D (top 25th percentile)\",\n",
" other_census_tract_fields_to_keep=[],\n", " other_census_tract_fields_to_keep=[],\n",
" ),\n", " ),\n",
" Index(\n", "# Index(\n",
" method_name=\"Score D (30th percentile)\",\n", "# method_name=\"Score D (30th percentile)\",\n",
" priority_communities_field=\"Score D (top 30th percentile)\",\n", "# priority_communities_field=\"Score D (top 30th percentile)\",\n",
" other_census_tract_fields_to_keep=[],\n", "# other_census_tract_fields_to_keep=[],\n",
" ),\n", "# ),\n",
" Index(\n", "# Index(\n",
" method_name=\"Score D (35th percentile)\",\n", "# method_name=\"Score D (35th percentile)\",\n",
" priority_communities_field=\"Score D (top 35th percentile)\",\n", "# priority_communities_field=\"Score D (top 35th percentile)\",\n",
" other_census_tract_fields_to_keep=[],\n", "# other_census_tract_fields_to_keep=[],\n",
" ),\n", "# ),\n",
" Index(\n", "# Index(\n",
" method_name=\"Score D (40th percentile)\",\n", "# method_name=\"Score D (40th percentile)\",\n",
" priority_communities_field=\"Score D (top 40th percentile)\",\n", "# priority_communities_field=\"Score D (top 40th percentile)\",\n",
" other_census_tract_fields_to_keep=[],\n", "# other_census_tract_fields_to_keep=[],\n",
" ),\n", "# ),\n",
" Index(\n", " Index(\n",
" method_name=\"Poverty\",\n", " method_name=\"Poverty\",\n",
" priority_communities_field=\"Poverty (Less than 200% of federal poverty line) (top 25th percentile)\",\n", " priority_communities_field=\"Poverty (Less than 200% of federal poverty line) (top 25th percentile)\",\n",
@ -534,7 +549,7 @@
{ {
"cell_type": "code", "cell_type": "code",
"execution_count": null, "execution_count": null,
"id": "d7acf80d", "id": "eeb9699d",
"metadata": {}, "metadata": {},
"outputs": [], "outputs": [],
"source": [ "source": [
@ -682,7 +697,7 @@
{ {
"cell_type": "code", "cell_type": "code",
"execution_count": null, "execution_count": null,
"id": "777a4623", "id": "4f44426c",
"metadata": {}, "metadata": {},
"outputs": [], "outputs": [],
"source": [ "source": [
@ -1140,14 +1155,6 @@
"\n", "\n",
"print(file_paths)" "print(file_paths)"
] ]
},
{
"cell_type": "code",
"execution_count": null,
"id": "e679502a",
"metadata": {},
"outputs": [],
"source": []
} }
], ],
"metadata": { "metadata": {

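The three new `Index` entries above mirror the flags produced by the score ETL: Score F is the AND of the socioeconomic flag and the burden flag, where the burden flag is an OR over the >90th-percentile indicator checks. A toy sketch of that boolean structure (column names taken from this commit's diffs, values invented, only two example burden columns shown):

```python
# Minimal sketch of how the three comparison-tool fields relate; not the full
# Score F definition, just its boolean structure.
import pandas as pd

df = pd.DataFrame(
    {
        "Particulate matter (PM2.5) (percentile)": [0.95, 0.40],
        "Respiratory hazard index (percentile)": [0.50, 0.10],
        "Meets socioeconomic criteria": [True, True],
    }
)

df["Meets burden criteria"] = (
    df["Particulate matter (PM2.5) (percentile)"] > 0.9
) | (df["Respiratory hazard index (percentile)"] > 0.9)

df["Score F (communities)"] = (
    df["Meets socioeconomic criteria"] & df["Meets burden criteria"]
)
```
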
@ -98,11 +98,50 @@ def remove_all_dirs_from_dir(dir_path: Path) -> None:
logging.info(f"Removing directory {file_path}") logging.info(f"Removing directory {file_path}")
def download_file_from_url(
file_url: str,
download_file_name: Path,
verify: bool = True,
) -> str:
"""Downloads a file from a remote URL location and returns the file location.
Args:
file_url (str): URL where the zip file is located
download_file_name (pathlib.Path): file path where the file will be downloaded (called downloaded.zip by default)
verify (bool): A flag to check if the certificate is valid. If truthy, an invalid certificate will throw an error (optional, default to False)
Returns:
None
"""
# disable https warning
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
if not os.path.isdir(download_file_name.parent):
os.mkdir(download_file_name.parent)
logger.info(f"Downloading {file_url}")
response = requests.get(file_url, verify=verify)
if response.status_code == 200:
file_contents = response.content
else:
sys.exit(
f"HTTP response {response.status_code} from url {file_url}. Info: {response.content}"
)
# Write the contents to disk.
file = open(download_file_name, "wb")
file.write(file_contents)
file.close()
return download_file_name
def unzip_file_from_url( def unzip_file_from_url(
file_url: str, file_url: str,
download_path: Path, download_path: Path,
unzipped_file_path: Path, unzipped_file_path: Path,
verify: bool = False, verify: bool = True,
) -> None: ) -> None:
"""Downloads a zip file from a remote URL location and unzips it in a specific directory, removing the temporary file after """Downloads a zip file from a remote URL location and unzips it in a specific directory, removing the temporary file after
@ -116,23 +155,11 @@ def unzip_file_from_url(
None None
""" """
zip_file_path = download_file_from_url(
# disable https warning file_url=file_url,
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) download_file_name=download_path / "downloaded.zip",
verify=verify,
logger.info(f"Downloading {file_url}") )
response = requests.get(file_url, verify=verify)
if response.status_code == 200:
file_contents = response.content
else:
sys.exit(
f"HTTP response {response.status_code} from url {file_url}. Info: {response.content}"
)
zip_file_path = download_path / "downloaded.zip"
zip_file = open(zip_file_path, "wb")
zip_file.write(file_contents)
zip_file.close()
logger.info(f"Extracting {zip_file_path}") logger.info(f"Extracting {zip_file_path}")
with zipfile.ZipFile(zip_file_path, "r") as zip_ref: with zipfile.ZipFile(zip_file_path, "r") as zip_ref:

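Reviewer note: a hedged usage sketch of the refactored helpers. `download_file_from_url` now handles plain file downloads (as used by the CDC Places ETL), and `unzip_file_from_url` delegates its download step to it. The URL and paths below are placeholders; the calls only exercise the signatures shown in this diff.

```python
# Illustrative calls only, against a hypothetical endpoint and scratch directory.
from pathlib import Path

from data_pipeline.utils import download_file_from_url, unzip_file_from_url

TMP_PATH = Path("/tmp/data_pipeline")  # placeholder scratch directory

# Plain CSV download; verify defaults to True, so TLS certificates are checked.
csv_path = download_file_from_url(
    file_url="https://example.com/data.csv",
    download_file_name=TMP_PATH / "example" / "data.csv",
)

# Zip download plus extraction; verify=False is reserved for endpoints with
# known certificate problems (e.g. the EJScreen change in this commit).
unzip_file_from_url(
    file_url="https://example.com/archive.zip",
    download_path=TMP_PATH,
    unzipped_file_path=TMP_PATH / "archive",
    verify=False,
)
```
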
@ -14,6 +14,20 @@ category = "main"
optional = false optional = false
python-versions = "*" python-versions = "*"
[[package]]
name = "argcomplete"
version = "1.12.3"
description = "Bash tab completion for argparse"
category = "main"
optional = false
python-versions = "*"
[package.dependencies]
importlib-metadata = {version = ">=0.23,<5", markers = "python_version == \"3.7\""}
[package.extras]
test = ["coverage", "flake8", "pexpect", "wheel"]
[[package]] [[package]]
name = "argon2-cffi" name = "argon2-cffi"
version = "20.1.0" version = "20.1.0"
@ -237,6 +251,17 @@ python-versions = ">=3.6"
docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"]
testing = ["pytest (>=4.6)", "pytest-checkdocs (>=1.2.3)", "pytest-flake8", "pytest-cov", "pytest-enabler", "pytest-black (>=0.3.7)", "pytest-mypy"] testing = ["pytest (>=4.6)", "pytest-checkdocs (>=1.2.3)", "pytest-flake8", "pytest-cov", "pytest-enabler", "pytest-black (>=0.3.7)", "pytest-mypy"]
[[package]]
name = "cycler"
version = "0.10.0"
description = "Composable style cycles"
category = "main"
optional = false
python-versions = "*"
[package.dependencies]
six = "*"
[[package]] [[package]]
name = "debugpy" name = "debugpy"
version = "1.4.1" version = "1.4.1"
@ -387,7 +412,7 @@ python-versions = ">=3.5"
[[package]] [[package]]
name = "importlib-metadata" name = "importlib-metadata"
version = "3.10.1" version = "4.6.3"
description = "Read metadata from Python packages" description = "Read metadata from Python packages"
category = "main" category = "main"
optional = false optional = false
@ -399,7 +424,8 @@ zipp = ">=0.5"
[package.extras] [package.extras]
docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"]
testing = ["pytest (>=4.6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "packaging", "pep517", "pyfakefs", "flufl.flake8", "pytest-black (>=0.3.7)", "pytest-mypy", "importlib-resources (>=1.3)"] perf = ["ipython"]
testing = ["pytest (>=4.6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "packaging", "pep517", "pyfakefs", "flufl.flake8", "pytest-perf (>=0.9.2)", "pytest-black (>=0.3.7)", "pytest-mypy", "importlib-resources (>=1.3)"]
[[package]] [[package]]
name = "iniconfig" name = "iniconfig"
@ -411,7 +437,7 @@ python-versions = "*"
[[package]] [[package]]
name = "ipykernel" name = "ipykernel"
version = "6.0.3" version = "6.1.0"
description = "IPython Kernel for Jupyter" description = "IPython Kernel for Jupyter"
category = "main" category = "main"
optional = false optional = false
@ -419,8 +445,9 @@ python-versions = ">=3.7"
[package.dependencies] [package.dependencies]
appnope = {version = "*", markers = "platform_system == \"Darwin\""} appnope = {version = "*", markers = "platform_system == \"Darwin\""}
argcomplete = {version = ">=1.12.3", markers = "python_version < \"3.8.0\""}
debugpy = ">=1.0.0,<2.0" debugpy = ">=1.0.0,<2.0"
importlib-metadata = {version = "<4", markers = "python_version < \"3.8.0\""} importlib-metadata = {version = "<5", markers = "python_version < \"3.8.0\""}
ipython = ">=7.23.1,<8.0" ipython = ">=7.23.1,<8.0"
jupyter-client = "<7.0" jupyter-client = "<7.0"
matplotlib-inline = ">=0.1.0,<0.2.0" matplotlib-inline = ">=0.1.0,<0.2.0"
@ -727,6 +754,14 @@ category = "main"
optional = false optional = false
python-versions = ">=3.6" python-versions = ">=3.6"
[[package]]
name = "kiwisolver"
version = "1.3.1"
description = "A fast implementation of the Cassowary constraint solver"
category = "main"
optional = false
python-versions = ">=3.6"
[[package]] [[package]]
name = "lazy-object-proxy" name = "lazy-object-proxy"
version = "1.6.0" version = "1.6.0"
@ -770,6 +805,22 @@ category = "main"
optional = false optional = false
python-versions = ">=3.6" python-versions = ">=3.6"
[[package]]
name = "matplotlib"
version = "3.4.3"
description = "Python plotting package"
category = "main"
optional = false
python-versions = ">=3.7"
[package.dependencies]
cycler = ">=0.10"
kiwisolver = ">=1.0.1"
numpy = ">=1.16"
pillow = ">=6.2.0"
pyparsing = ">=2.2.1"
python-dateutil = ">=2.7"
[[package]] [[package]]
name = "matplotlib-inline" name = "matplotlib-inline"
version = "0.1.2" version = "0.1.2"
@ -916,7 +967,7 @@ python-versions = ">=3.5"
[[package]] [[package]]
name = "notebook" name = "notebook"
version = "6.4.2" version = "6.4.3"
description = "A web-based notebook environment for interactive computing" description = "A web-based notebook environment for interactive computing"
category = "main" category = "main"
optional = false optional = false
@ -1036,6 +1087,14 @@ category = "main"
optional = false optional = false
python-versions = "*" python-versions = "*"
[[package]]
name = "pillow"
version = "8.3.1"
description = "Python Imaging Library (Fork)"
category = "main"
optional = false
python-versions = ">=3.6"
[[package]] [[package]]
name = "platformdirs" name = "platformdirs"
version = "2.2.0" version = "2.2.0"
@ -1373,7 +1432,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*"
[[package]] [[package]]
name = "terminado" name = "terminado"
version = "0.10.1" version = "0.11.0"
description = "Tornado websocket backend for the Xterm.js Javascript terminal emulator library." description = "Tornado websocket backend for the Xterm.js Javascript terminal emulator library."
category = "main" category = "main"
optional = false optional = false
@ -1525,7 +1584,7 @@ jellyfish = "0.6.1"
[[package]] [[package]]
name = "virtualenv" name = "virtualenv"
version = "20.7.1" version = "20.7.2"
description = "Virtual Python Environment builder" description = "Virtual Python Environment builder"
category = "dev" category = "dev"
optional = false optional = false
@ -1601,7 +1660,7 @@ testing = ["pytest (>=4.6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytes
[metadata] [metadata]
lock-version = "1.1" lock-version = "1.1"
python-versions = "^3.7.1" python-versions = "^3.7.1"
content-hash = "14f5225f41212af2785865f984c8ce402712df0398b59c460cb849a0792b3f17" content-hash = "7fd9dc6d4902d1010da038192ba1a074d9c50264cb4b5d5fabfc45869f9f718a"
[metadata.files] [metadata.files]
appdirs = [ appdirs = [
@ -1612,6 +1671,10 @@ appnope = [
{file = "appnope-0.1.2-py2.py3-none-any.whl", hash = "sha256:93aa393e9d6c54c5cd570ccadd8edad61ea0c4b9ea7a01409020c9aa019eb442"}, {file = "appnope-0.1.2-py2.py3-none-any.whl", hash = "sha256:93aa393e9d6c54c5cd570ccadd8edad61ea0c4b9ea7a01409020c9aa019eb442"},
{file = "appnope-0.1.2.tar.gz", hash = "sha256:dd83cd4b5b460958838f6eb3000c660b1f9caf2a5b1de4264e941512f603258a"}, {file = "appnope-0.1.2.tar.gz", hash = "sha256:dd83cd4b5b460958838f6eb3000c660b1f9caf2a5b1de4264e941512f603258a"},
] ]
argcomplete = [
{file = "argcomplete-1.12.3-py2.py3-none-any.whl", hash = "sha256:291f0beca7fd49ce285d2f10e4c1c77e9460cf823eef2de54df0c0fec88b0d81"},
{file = "argcomplete-1.12.3.tar.gz", hash = "sha256:2c7dbffd8c045ea534921e63b0be6fe65e88599990d8dc408ac8c542b72a5445"},
]
argon2-cffi = [ argon2-cffi = [
{file = "argon2-cffi-20.1.0.tar.gz", hash = "sha256:d8029b2d3e4b4cea770e9e5a0104dd8fa185c1724a0f01528ae4826a6d25f97d"}, {file = "argon2-cffi-20.1.0.tar.gz", hash = "sha256:d8029b2d3e4b4cea770e9e5a0104dd8fa185c1724a0f01528ae4826a6d25f97d"},
{file = "argon2_cffi-20.1.0-cp27-cp27m-macosx_10_6_intel.whl", hash = "sha256:6ea92c980586931a816d61e4faf6c192b4abce89aa767ff6581e6ddc985ed003"}, {file = "argon2_cffi-20.1.0-cp27-cp27m-macosx_10_6_intel.whl", hash = "sha256:6ea92c980586931a816d61e4faf6c192b4abce89aa767ff6581e6ddc985ed003"},
@ -1741,6 +1804,10 @@ configparser = [
{file = "configparser-5.0.2-py3-none-any.whl", hash = "sha256:af59f2cdd7efbdd5d111c1976ecd0b82db9066653362f0962d7bf1d3ab89a1fa"}, {file = "configparser-5.0.2-py3-none-any.whl", hash = "sha256:af59f2cdd7efbdd5d111c1976ecd0b82db9066653362f0962d7bf1d3ab89a1fa"},
{file = "configparser-5.0.2.tar.gz", hash = "sha256:85d5de102cfe6d14a5172676f09d19c465ce63d6019cf0a4ef13385fc535e828"}, {file = "configparser-5.0.2.tar.gz", hash = "sha256:85d5de102cfe6d14a5172676f09d19c465ce63d6019cf0a4ef13385fc535e828"},
] ]
cycler = [
{file = "cycler-0.10.0-py2.py3-none-any.whl", hash = "sha256:1d8a5ae1ff6c5cf9b93e8811e581232ad8920aeec647c37316ceac982b08cb2d"},
{file = "cycler-0.10.0.tar.gz", hash = "sha256:cd7b2d1018258d7247a71425e9f26463dfb444d411c39569972f4ce586b0c9d8"},
]
debugpy = [ debugpy = [
{file = "debugpy-1.4.1-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:a2c5a1c49239707ed5bc8e97d8f9252fb392d9e13c79c7b477593d7dde4ae24a"}, {file = "debugpy-1.4.1-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:a2c5a1c49239707ed5bc8e97d8f9252fb392d9e13c79c7b477593d7dde4ae24a"},
{file = "debugpy-1.4.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:ebc241351791595796864a960892e1cd58627064feda939d0377edd0730bbff2"}, {file = "debugpy-1.4.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:ebc241351791595796864a960892e1cd58627064feda939d0377edd0730bbff2"},
@ -1855,16 +1922,16 @@ idna = [
{file = "idna-3.2.tar.gz", hash = "sha256:467fbad99067910785144ce333826c71fb0e63a425657295239737f7ecd125f3"}, {file = "idna-3.2.tar.gz", hash = "sha256:467fbad99067910785144ce333826c71fb0e63a425657295239737f7ecd125f3"},
] ]
importlib-metadata = [ importlib-metadata = [
{file = "importlib_metadata-3.10.1-py3-none-any.whl", hash = "sha256:2ec0faae539743ae6aaa84b49a169670a465f7f5d64e6add98388cc29fd1f2f6"}, {file = "importlib_metadata-4.6.3-py3-none-any.whl", hash = "sha256:51c6635429c77cf1ae634c997ff9e53ca3438b495f10a55ba28594dd69764a8b"},
{file = "importlib_metadata-3.10.1.tar.gz", hash = "sha256:c9356b657de65c53744046fa8f7358afe0714a1af7d570c00c3835c2d724a7c1"}, {file = "importlib_metadata-4.6.3.tar.gz", hash = "sha256:0645585859e9a6689c523927a5032f2ba5919f1f7d0e84bd4533312320de1ff9"},
] ]
iniconfig = [ iniconfig = [
{file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"},
{file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"}, {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"},
] ]
ipykernel = [ ipykernel = [
{file = "ipykernel-6.0.3-py3-none-any.whl", hash = "sha256:9f9f41a14caf2fde2b7802446adf83885afcbf50585a46d6c687292599a3c3af"}, {file = "ipykernel-6.1.0-py3-none-any.whl", hash = "sha256:804202fb4a621dce163bf88ce2687c98450d7ed728ef1d17d6f5ed20744c6e02"},
{file = "ipykernel-6.0.3.tar.gz", hash = "sha256:0df34a78c7e1422800d6078cde65ccdcdb859597046c338c759db4dbc535c58f"}, {file = "ipykernel-6.1.0.tar.gz", hash = "sha256:e21a718c696ded7d4d5e25b13d2bdd88e099e782fd3be66f9d2e66397543d283"},
] ]
ipython = [ ipython = [
{file = "ipython-7.26.0-py3-none-any.whl", hash = "sha256:892743b65c21ed72b806a3a602cca408520b3200b89d1924f4b3d2cdb3692362"}, {file = "ipython-7.26.0-py3-none-any.whl", hash = "sha256:892743b65c21ed72b806a3a602cca408520b3200b89d1924f4b3d2cdb3692362"},
@ -1940,6 +2007,40 @@ jupyterlab-widgets = [
{file = "jupyterlab_widgets-1.0.0-py3-none-any.whl", hash = "sha256:caeaf3e6103180e654e7d8d2b81b7d645e59e432487c1d35a41d6d3ee56b3fef"}, {file = "jupyterlab_widgets-1.0.0-py3-none-any.whl", hash = "sha256:caeaf3e6103180e654e7d8d2b81b7d645e59e432487c1d35a41d6d3ee56b3fef"},
{file = "jupyterlab_widgets-1.0.0.tar.gz", hash = "sha256:5c1a29a84d3069208cb506b10609175b249b6486d6b1cbae8fcde2a11584fb78"}, {file = "jupyterlab_widgets-1.0.0.tar.gz", hash = "sha256:5c1a29a84d3069208cb506b10609175b249b6486d6b1cbae8fcde2a11584fb78"},
] ]
kiwisolver = [
{file = "kiwisolver-1.3.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:fd34fbbfbc40628200730bc1febe30631347103fc8d3d4fa012c21ab9c11eca9"},
{file = "kiwisolver-1.3.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:d3155d828dec1d43283bd24d3d3e0d9c7c350cdfcc0bd06c0ad1209c1bbc36d0"},
{file = "kiwisolver-1.3.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:5a7a7dbff17e66fac9142ae2ecafb719393aaee6a3768c9de2fd425c63b53e21"},
{file = "kiwisolver-1.3.1-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:f8d6f8db88049a699817fd9178782867bf22283e3813064302ac59f61d95be05"},
{file = "kiwisolver-1.3.1-cp36-cp36m-manylinux2014_ppc64le.whl", hash = "sha256:5f6ccd3dd0b9739edcf407514016108e2280769c73a85b9e59aa390046dbf08b"},
{file = "kiwisolver-1.3.1-cp36-cp36m-win32.whl", hash = "sha256:225e2e18f271e0ed8157d7f4518ffbf99b9450fca398d561eb5c4a87d0986dd9"},
{file = "kiwisolver-1.3.1-cp36-cp36m-win_amd64.whl", hash = "sha256:cf8b574c7b9aa060c62116d4181f3a1a4e821b2ec5cbfe3775809474113748d4"},
{file = "kiwisolver-1.3.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:232c9e11fd7ac3a470d65cd67e4359eee155ec57e822e5220322d7b2ac84fbf0"},
{file = "kiwisolver-1.3.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:b38694dcdac990a743aa654037ff1188c7a9801ac3ccc548d3341014bc5ca278"},
{file = "kiwisolver-1.3.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:ca3820eb7f7faf7f0aa88de0e54681bddcb46e485beb844fcecbcd1c8bd01689"},
{file = "kiwisolver-1.3.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:c8fd0f1ae9d92b42854b2979024d7597685ce4ada367172ed7c09edf2cef9cb8"},
{file = "kiwisolver-1.3.1-cp37-cp37m-manylinux2014_ppc64le.whl", hash = "sha256:1e1bc12fb773a7b2ffdeb8380609f4f8064777877b2225dec3da711b421fda31"},
{file = "kiwisolver-1.3.1-cp37-cp37m-win32.whl", hash = "sha256:72c99e39d005b793fb7d3d4e660aed6b6281b502e8c1eaf8ee8346023c8e03bc"},
{file = "kiwisolver-1.3.1-cp37-cp37m-win_amd64.whl", hash = "sha256:8be8d84b7d4f2ba4ffff3665bcd0211318aa632395a1a41553250484a871d454"},
{file = "kiwisolver-1.3.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:31dfd2ac56edc0ff9ac295193eeaea1c0c923c0355bf948fbd99ed6018010b72"},
{file = "kiwisolver-1.3.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:563c649cfdef27d081c84e72a03b48ea9408c16657500c312575ae9d9f7bc1c3"},
{file = "kiwisolver-1.3.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:78751b33595f7f9511952e7e60ce858c6d64db2e062afb325985ddbd34b5c131"},
{file = "kiwisolver-1.3.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:a357fd4f15ee49b4a98b44ec23a34a95f1e00292a139d6015c11f55774ef10de"},
{file = "kiwisolver-1.3.1-cp38-cp38-manylinux2014_ppc64le.whl", hash = "sha256:5989db3b3b34b76c09253deeaf7fbc2707616f130e166996606c284395da3f18"},
{file = "kiwisolver-1.3.1-cp38-cp38-win32.whl", hash = "sha256:c08e95114951dc2090c4a630c2385bef681cacf12636fb0241accdc6b303fd81"},
{file = "kiwisolver-1.3.1-cp38-cp38-win_amd64.whl", hash = "sha256:44a62e24d9b01ba94ae7a4a6c3fb215dc4af1dde817e7498d901e229aaf50e4e"},
{file = "kiwisolver-1.3.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:50af681a36b2a1dee1d3c169ade9fdc59207d3c31e522519181e12f1b3ba7000"},
{file = "kiwisolver-1.3.1-cp39-cp39-manylinux1_i686.whl", hash = "sha256:a53d27d0c2a0ebd07e395e56a1fbdf75ffedc4a05943daf472af163413ce9598"},
{file = "kiwisolver-1.3.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:834ee27348c4aefc20b479335fd422a2c69db55f7d9ab61721ac8cd83eb78882"},
{file = "kiwisolver-1.3.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:5c3e6455341008a054cccee8c5d24481bcfe1acdbc9add30aa95798e95c65621"},
{file = "kiwisolver-1.3.1-cp39-cp39-manylinux2014_ppc64le.whl", hash = "sha256:acef3d59d47dd85ecf909c359d0fd2c81ed33bdff70216d3956b463e12c38a54"},
{file = "kiwisolver-1.3.1-cp39-cp39-win32.whl", hash = "sha256:c5518d51a0735b1e6cee1fdce66359f8d2b59c3ca85dc2b0813a8aa86818a030"},
{file = "kiwisolver-1.3.1-cp39-cp39-win_amd64.whl", hash = "sha256:b9edd0110a77fc321ab090aaa1cfcaba1d8499850a12848b81be2222eab648f6"},
{file = "kiwisolver-1.3.1-pp36-pypy36_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0cd53f403202159b44528498de18f9285b04482bab2a6fc3f5dd8dbb9352e30d"},
{file = "kiwisolver-1.3.1-pp36-pypy36_pp73-manylinux2010_x86_64.whl", hash = "sha256:33449715e0101e4d34f64990352bce4095c8bf13bed1b390773fc0a7295967b3"},
{file = "kiwisolver-1.3.1-pp36-pypy36_pp73-win32.whl", hash = "sha256:401a2e9afa8588589775fe34fc22d918ae839aaaf0c0e96441c0fdbce6d8ebe6"},
{file = "kiwisolver-1.3.1.tar.gz", hash = "sha256:950a199911a8d94683a6b10321f9345d5a3a8433ec58b217ace979e18f16e248"},
]
lazy-object-proxy = [ lazy-object-proxy = [
{file = "lazy-object-proxy-1.6.0.tar.gz", hash = "sha256:489000d368377571c6f982fba6497f2aa13c6d1facc40660963da62f5c379726"}, {file = "lazy-object-proxy-1.6.0.tar.gz", hash = "sha256:489000d368377571c6f982fba6497f2aa13c6d1facc40660963da62f5c379726"},
{file = "lazy_object_proxy-1.6.0-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:c6938967f8528b3668622a9ed3b31d145fab161a32f5891ea7b84f6b790be05b"}, {file = "lazy_object_proxy-1.6.0-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:c6938967f8528b3668622a9ed3b31d145fab161a32f5891ea7b84f6b790be05b"},
@ -2052,6 +2153,29 @@ markupsafe = [
{file = "MarkupSafe-2.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:693ce3f9e70a6cf7d2fb9e6c9d8b204b6b39897a2c4a1aa65728d5ac97dcc1d8"}, {file = "MarkupSafe-2.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:693ce3f9e70a6cf7d2fb9e6c9d8b204b6b39897a2c4a1aa65728d5ac97dcc1d8"},
{file = "MarkupSafe-2.0.1.tar.gz", hash = "sha256:594c67807fb16238b30c44bdf74f36c02cdf22d1c8cda91ef8a0ed8dabf5620a"}, {file = "MarkupSafe-2.0.1.tar.gz", hash = "sha256:594c67807fb16238b30c44bdf74f36c02cdf22d1c8cda91ef8a0ed8dabf5620a"},
] ]
matplotlib = [
{file = "matplotlib-3.4.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:5c988bb43414c7c2b0a31bd5187b4d27fd625c080371b463a6d422047df78913"},
{file = "matplotlib-3.4.3-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:f1c5efc278d996af8a251b2ce0b07bbeccb821f25c8c9846bdcb00ffc7f158aa"},
{file = "matplotlib-3.4.3-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:eeb1859efe7754b1460e1d4991bbd4a60a56f366bc422ef3a9c5ae05f0bc70b5"},
{file = "matplotlib-3.4.3-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:844a7b0233e4ff7fba57e90b8799edaa40b9e31e300b8d5efc350937fa8b1bea"},
{file = "matplotlib-3.4.3-cp37-cp37m-win32.whl", hash = "sha256:85f0c9cf724715e75243a7b3087cf4a3de056b55e05d4d76cc58d610d62894f3"},
{file = "matplotlib-3.4.3-cp37-cp37m-win_amd64.whl", hash = "sha256:c70b6311dda3e27672f1bf48851a0de816d1ca6aaf3d49365fbdd8e959b33d2b"},
{file = "matplotlib-3.4.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b884715a59fec9ad3b6048ecf3860f3b2ce965e676ef52593d6fa29abcf7d330"},
{file = "matplotlib-3.4.3-cp38-cp38-manylinux1_i686.whl", hash = "sha256:a78a3b51f29448c7f4d4575e561f6b0dbb8d01c13c2046ab6c5220eb25c06506"},
{file = "matplotlib-3.4.3-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:6a724e3a48a54b8b6e7c4ae38cd3d07084508fa47c410c8757e9db9791421838"},
{file = "matplotlib-3.4.3-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:48e1e0859b54d5f2e29bb78ca179fd59b971c6ceb29977fb52735bfd280eb0f5"},
{file = "matplotlib-3.4.3-cp38-cp38-win32.whl", hash = "sha256:01c9de93a2ca0d128c9064f23709362e7fefb34910c7c9e0b8ab0de8258d5eda"},
{file = "matplotlib-3.4.3-cp38-cp38-win_amd64.whl", hash = "sha256:ebfb01a65c3f5d53a8c2a8133fec2b5221281c053d944ae81ff5822a68266617"},
{file = "matplotlib-3.4.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b8b53f336a4688cfce615887505d7e41fd79b3594bf21dd300531a4f5b4f746a"},
{file = "matplotlib-3.4.3-cp39-cp39-manylinux1_i686.whl", hash = "sha256:fcd6f1954943c0c192bfbebbac263f839d7055409f1173f80d8b11a224d236da"},
{file = "matplotlib-3.4.3-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:6be8df61b1626e1a142c57e065405e869e9429b4a6dab4a324757d0dc4d42235"},
{file = "matplotlib-3.4.3-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:41b6e307458988891fcdea2d8ecf84a8c92d53f84190aa32da65f9505546e684"},
{file = "matplotlib-3.4.3-cp39-cp39-win32.whl", hash = "sha256:f72657f1596199dc1e4e7a10f52a4784ead8a711f4e5b59bea95bdb97cf0e4fd"},
{file = "matplotlib-3.4.3-cp39-cp39-win_amd64.whl", hash = "sha256:f15edcb0629a0801738925fe27070480f446fcaa15de65946ff946ad99a59a40"},
{file = "matplotlib-3.4.3-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:556965514b259204637c360d213de28d43a1f4aed1eca15596ce83f768c5a56f"},
{file = "matplotlib-3.4.3-pp37-pypy37_pp73-manylinux2010_x86_64.whl", hash = "sha256:54a026055d5f8614f184e588f6e29064019a0aa8448450214c0b60926d62d919"},
{file = "matplotlib-3.4.3.tar.gz", hash = "sha256:fc4f526dfdb31c9bd6b8ca06bf9fab663ca12f3ec9cdf4496fb44bc680140318"},
]
matplotlib-inline = [ matplotlib-inline = [
{file = "matplotlib-inline-0.1.2.tar.gz", hash = "sha256:f41d5ff73c9f5385775d5c0bc13b424535c8402fe70ea8210f93e11f3683993e"}, {file = "matplotlib-inline-0.1.2.tar.gz", hash = "sha256:f41d5ff73c9f5385775d5c0bc13b424535c8402fe70ea8210f93e11f3683993e"},
{file = "matplotlib_inline-0.1.2-py3-none-any.whl", hash = "sha256:5cf1176f554abb4fa98cb362aa2b55c500147e4bdbb07e3fda359143e1da0811"}, {file = "matplotlib_inline-0.1.2-py3-none-any.whl", hash = "sha256:5cf1176f554abb4fa98cb362aa2b55c500147e4bdbb07e3fda359143e1da0811"},
@ -2114,8 +2238,8 @@ nest-asyncio = [
{file = "nest_asyncio-1.5.1.tar.gz", hash = "sha256:afc5a1c515210a23c461932765691ad39e8eba6551c055ac8d5546e69250d0aa"}, {file = "nest_asyncio-1.5.1.tar.gz", hash = "sha256:afc5a1c515210a23c461932765691ad39e8eba6551c055ac8d5546e69250d0aa"},
] ]
notebook = [ notebook = [
{file = "notebook-6.4.2-py3-none-any.whl", hash = "sha256:5ae23d7f831a5788e8bd51a0ba65c486db3bfd43e9db97a62330b6273e3175e3"}, {file = "notebook-6.4.3-py3-none-any.whl", hash = "sha256:b50eafa8208d5db966efd1caa4076b4dfc51815e02a805b32ecd717e9e6cc071"},
{file = "notebook-6.4.2.tar.gz", hash = "sha256:ba9db5e5a9bd2d272b67e3de9143cca2be5125578f1c4f2902d7178ce2f0b4ff"}, {file = "notebook-6.4.3.tar.gz", hash = "sha256:e6b6dfed36b00cf950f63c0d42e947c101d4258aec21624de62b9e0c11ed5c0d"},
] ]
numpy = [ numpy = [
{file = "numpy-1.21.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:38e8648f9449a549a7dfe8d8755a5979b45b3538520d1e735637ef28e8c2dc50"}, {file = "numpy-1.21.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:38e8648f9449a549a7dfe8d8755a5979b45b3538520d1e735637ef28e8c2dc50"},
@ -2195,6 +2319,47 @@ pickleshare = [
{file = "pickleshare-0.7.5-py2.py3-none-any.whl", hash = "sha256:9649af414d74d4df115d5d718f82acb59c9d418196b7b4290ed47a12ce62df56"}, {file = "pickleshare-0.7.5-py2.py3-none-any.whl", hash = "sha256:9649af414d74d4df115d5d718f82acb59c9d418196b7b4290ed47a12ce62df56"},
{file = "pickleshare-0.7.5.tar.gz", hash = "sha256:87683d47965c1da65cdacaf31c8441d12b8044cdec9aca500cd78fc2c683afca"}, {file = "pickleshare-0.7.5.tar.gz", hash = "sha256:87683d47965c1da65cdacaf31c8441d12b8044cdec9aca500cd78fc2c683afca"},
] ]
pillow = [
{file = "Pillow-8.3.1-1-cp36-cp36m-win_amd64.whl", hash = "sha256:fd7eef578f5b2200d066db1b50c4aa66410786201669fb76d5238b007918fb24"},
{file = "Pillow-8.3.1-1-cp37-cp37m-win_amd64.whl", hash = "sha256:75e09042a3b39e0ea61ce37e941221313d51a9c26b8e54e12b3ececccb71718a"},
{file = "Pillow-8.3.1-1-cp38-cp38-win_amd64.whl", hash = "sha256:c0e0550a404c69aab1e04ae89cca3e2a042b56ab043f7f729d984bf73ed2a093"},
{file = "Pillow-8.3.1-1-cp39-cp39-win_amd64.whl", hash = "sha256:479ab11cbd69612acefa8286481f65c5dece2002ffaa4f9db62682379ca3bb77"},
{file = "Pillow-8.3.1-1-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:f156d6ecfc747ee111c167f8faf5f4953761b5e66e91a4e6767e548d0f80129c"},
{file = "Pillow-8.3.1-cp36-cp36m-macosx_10_10_x86_64.whl", hash = "sha256:196560dba4da7a72c5e7085fccc5938ab4075fd37fe8b5468869724109812edd"},
{file = "Pillow-8.3.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:29c9569049d04aaacd690573a0398dbd8e0bf0255684fee512b413c2142ab723"},
{file = "Pillow-8.3.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c088a000dfdd88c184cc7271bfac8c5b82d9efa8637cd2b68183771e3cf56f04"},
{file = "Pillow-8.3.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:fc214a6b75d2e0ea7745488da7da3c381f41790812988c7a92345978414fad37"},
{file = "Pillow-8.3.1-cp36-cp36m-win32.whl", hash = "sha256:a17ca41f45cf78c2216ebfab03add7cc350c305c38ff34ef4eef66b7d76c5229"},
{file = "Pillow-8.3.1-cp36-cp36m-win_amd64.whl", hash = "sha256:67b3666b544b953a2777cb3f5a922e991be73ab32635666ee72e05876b8a92de"},
{file = "Pillow-8.3.1-cp37-cp37m-macosx_10_10_x86_64.whl", hash = "sha256:ff04c373477723430dce2e9d024c708a047d44cf17166bf16e604b379bf0ca14"},
{file = "Pillow-8.3.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9364c81b252d8348e9cc0cb63e856b8f7c1b340caba6ee7a7a65c968312f7dab"},
{file = "Pillow-8.3.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a2f381932dca2cf775811a008aa3027671ace723b7a38838045b1aee8669fdcf"},
{file = "Pillow-8.3.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:d0da39795049a9afcaadec532e7b669b5ebbb2a9134576ebcc15dd5bdae33cc0"},
{file = "Pillow-8.3.1-cp37-cp37m-win32.whl", hash = "sha256:2b6dfa068a8b6137da34a4936f5a816aba0ecc967af2feeb32c4393ddd671cba"},
{file = "Pillow-8.3.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a4eef1ff2d62676deabf076f963eda4da34b51bc0517c70239fafed1d5b51500"},
{file = "Pillow-8.3.1-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:660a87085925c61a0dcc80efb967512ac34dbb256ff7dd2b9b4ee8dbdab58cf4"},
{file = "Pillow-8.3.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:15a2808e269a1cf2131930183dcc0419bc77bb73eb54285dde2706ac9939fa8e"},
{file = "Pillow-8.3.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:969cc558cca859cadf24f890fc009e1bce7d7d0386ba7c0478641a60199adf79"},
{file = "Pillow-8.3.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2ee77c14a0299d0541d26f3d8500bb57e081233e3fa915fa35abd02c51fa7fae"},
{file = "Pillow-8.3.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:c11003197f908878164f0e6da15fce22373ac3fc320cda8c9d16e6bba105b844"},
{file = "Pillow-8.3.1-cp38-cp38-win32.whl", hash = "sha256:3f08bd8d785204149b5b33e3b5f0ebbfe2190ea58d1a051c578e29e39bfd2367"},
{file = "Pillow-8.3.1-cp38-cp38-win_amd64.whl", hash = "sha256:70af7d222df0ff81a2da601fab42decb009dc721545ed78549cb96e3a1c5f0c8"},
{file = "Pillow-8.3.1-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:37730f6e68bdc6a3f02d2079c34c532330d206429f3cee651aab6b66839a9f0e"},
{file = "Pillow-8.3.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4bc3c7ef940eeb200ca65bd83005eb3aae8083d47e8fcbf5f0943baa50726856"},
{file = "Pillow-8.3.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c35d09db702f4185ba22bb33ef1751ad49c266534339a5cebeb5159d364f6f82"},
{file = "Pillow-8.3.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0b2efa07f69dc395d95bb9ef3299f4ca29bcb2157dc615bae0b42c3c20668ffc"},
{file = "Pillow-8.3.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:cc866706d56bd3a7dbf8bac8660c6f6462f2f2b8a49add2ba617bc0c54473d83"},
{file = "Pillow-8.3.1-cp39-cp39-win32.whl", hash = "sha256:9a211b663cf2314edbdb4cf897beeb5c9ee3810d1d53f0e423f06d6ebbf9cd5d"},
{file = "Pillow-8.3.1-cp39-cp39-win_amd64.whl", hash = "sha256:c2a5ff58751670292b406b9f06e07ed1446a4b13ffced6b6cab75b857485cbc8"},
{file = "Pillow-8.3.1-pp36-pypy36_pp73-macosx_10_10_x86_64.whl", hash = "sha256:c379425c2707078dfb6bfad2430728831d399dc95a7deeb92015eb4c92345eaf"},
{file = "Pillow-8.3.1-pp36-pypy36_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:114f816e4f73f9ec06997b2fde81a92cbf0777c9e8f462005550eed6bae57e63"},
{file = "Pillow-8.3.1-pp36-pypy36_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:8960a8a9f4598974e4c2aeb1bff9bdd5db03ee65fd1fce8adf3223721aa2a636"},
{file = "Pillow-8.3.1-pp37-pypy37_pp73-macosx_10_10_x86_64.whl", hash = "sha256:147bd9e71fb9dcf08357b4d530b5167941e222a6fd21f869c7911bac40b9994d"},
{file = "Pillow-8.3.1-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:1fd5066cd343b5db88c048d971994e56b296868766e461b82fa4e22498f34d77"},
{file = "Pillow-8.3.1-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f4ebde71785f8bceb39dcd1e7f06bcc5d5c3cf48b9f69ab52636309387b097c8"},
{file = "Pillow-8.3.1-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:1c03e24be975e2afe70dfc5da6f187eea0b49a68bb2b69db0f30a61b7031cee4"},
{file = "Pillow-8.3.1.tar.gz", hash = "sha256:2cac53839bfc5cece8fdbe7f084d5e3ee61e1303cccc86511d351adcb9e2c792"},
]
platformdirs = [
{file = "platformdirs-2.2.0-py3-none-any.whl", hash = "sha256:4666d822218db6a262bdfdc9c39d21f23b4cfdb08af331a81e92751daf6c866c"},
{file = "platformdirs-2.2.0.tar.gz", hash = "sha256:632daad3ab546bd8e6af0537d09805cec458dce201bccfe23012df73332e181e"},
@@ -2481,8 +2646,8 @@ six = [
{file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"},
]
terminado = [
-{file = "terminado-0.10.1-py3-none-any.whl", hash = "sha256:c89ace5bffd0e7268bdcf22526830eb787fd146ff9d78691a0528386f92b9ae3"},
+{file = "terminado-0.11.0-py3-none-any.whl", hash = "sha256:221eef83e6a504894842f7dccfa971ca2e98ec22a8a9118577e5257527674b42"},
-{file = "terminado-0.10.1.tar.gz", hash = "sha256:89d5dac2f4e2b39758a0ff9a3b643707c95a020a6df36e70583b88297cd59cbe"},
+{file = "terminado-0.11.0.tar.gz", hash = "sha256:1e01183885f64c1bba3cf89a5a995ad4acfed4e5f00aebcce1bf7f089b0825a1"},
]
testpath = [
{file = "testpath-0.5.0-py3-none-any.whl", hash = "sha256:8044f9a0bab6567fc644a3593164e872543bb44225b0e24846e2c89237937589"},
@@ -2600,8 +2765,8 @@ us = [
{file = "us-2.0.2.tar.gz", hash = "sha256:cb11ad0d43deff3a1c3690c74f0c731cff5b862c73339df2edd91133e1496fbc"},
]
virtualenv = [
-{file = "virtualenv-20.7.1-py2.py3-none-any.whl", hash = "sha256:73863dc3be1efe6ee638e77495c0c195a6384ae7b15c561f3ceb2698ae7267c1"},
+{file = "virtualenv-20.7.2-py2.py3-none-any.whl", hash = "sha256:e4670891b3a03eb071748c569a87cceaefbf643c5bac46d996c5a45c34aa0f06"},
-{file = "virtualenv-20.7.1.tar.gz", hash = "sha256:57bcb59c5898818bd555b1e0cfcf668bd6204bc2b53ad0e70a52413bd790f9e4"},
+{file = "virtualenv-20.7.2.tar.gz", hash = "sha256:9ef4e8ee4710826e98ff3075c9a4739e2cb1040de6a2a8d35db0055840dc96a0"},
]
wcwidth = [
{file = "wcwidth-0.2.5-py2.py3-none-any.whl", hash = "sha256:beb4802a9cebb9144e99086eff703a642a13d6a0052920003a230f3294bbe784"},

View file

@@ -12,6 +12,7 @@ geopandas = "^0.9.0"
ipython = "^7.24.1"
jupyter = "^1.0.0"
jupyter-contrib-nbextensions = "^0.5.1"
matplotlib = "^3.4.2"
numpy = "^1.21.0" numpy = "^1.21.0"
pandas = "^1.2.5" pandas = "^1.2.5"
python = "^3.7.1" python = "^3.7.1"
@ -61,6 +62,7 @@ disable = [
"W0613", # Disables unused argument "W0613", # Disables unused argument
"C0116", # Disables missing function or method docstring "C0116", # Disables missing function or method docstring
"C0115", # Disables missing class docstring "C0115", # Disables missing class docstring
"R0915", # Disables too many statements (score generation transform)
]
[tool.pylint.FORMAT]
@@ -73,6 +75,9 @@ ignore-docstrings = "yes"
ignore-imports = "yes"
min-similarity-lines = 4
[tool.black]
line-length = 80
[tool.liccheck]
# Authorized and unauthorized licenses in LOWER CASE
authorized_licenses = [
@@ -102,6 +107,7 @@ authorized_licenses = [
"python software foundation",
"zpl 2.1",
"gpl v3",
"historical permission notice and disclaimer (hpnd)",
]
[tool.poetry.scripts]

View file

@@ -1,4 +1,5 @@
appnope==0.1.2; sys_platform == "darwin" and python_version >= "3.7" and platform_system == "Darwin"
argcomplete==1.12.3; python_version < "3.8.0" and python_version >= "3.7"
argon2-cffi==20.1.0; python_version >= "3.6"
async-generator==1.10; python_full_version >= "3.6.1" and python_version >= "3.7"
attrs==21.2.0; python_version >= "3.6" and python_full_version < "3.0.0" or python_full_version >= "3.5.0" and python_version >= "3.6"
@@ -12,6 +13,7 @@ click-plugins==1.1.1; python_version >= "3.6"
click==8.0.1; python_version >= "3.6"
cligj==0.7.2; python_version >= "3.6" and python_full_version < "3.0.0" or python_full_version >= "3.3.0" and python_version < "4" and python_version >= "3.6"
colorama==0.4.4; python_version >= "3.7" and python_full_version < "3.0.0" and platform_system == "Windows" and sys_platform == "win32" or platform_system == "Windows" and python_version >= "3.7" and python_full_version >= "3.5.0" and sys_platform == "win32"
cycler==0.10.0; python_version >= "3.7"
debugpy==1.4.1; python_version >= "3.7" and python_full_version < "3.0.0" or python_full_version >= "3.5.0" and python_version >= "3.7"
decorator==5.0.9; python_version >= "3.7"
defusedxml==0.7.1; python_version >= "3.7" and python_full_version < "3.0.0" or python_full_version >= "3.5.0" and python_version >= "3.7"
@@ -20,8 +22,8 @@ entrypoints==0.3; python_version >= "3.7"
fiona==1.8.20; python_version >= "3.6"
geopandas==0.9.0; python_version >= "3.6"
idna==3.2; python_version >= "3.5" and python_full_version < "3.0.0" or python_full_version >= "3.6.0" and python_version >= "3.5"
-importlib-metadata==3.10.1; python_version < "3.8" and python_version >= "3.7"
+importlib-metadata==4.6.3; python_version == "3.7"
-ipykernel==6.0.3; python_version >= "3.7"
+ipykernel==6.1.0; python_version >= "3.7"
ipython-genutils==0.2.0; python_version >= "3.7"
ipython==7.26.0; python_version >= "3.7"
ipywidgets==7.6.3
@@ -40,16 +42,18 @@ jupyter-nbextensions-configurator==0.4.1
jupyter==1.0.0
jupyterlab-pygments==0.1.2; python_version >= "3.7"
jupyterlab-widgets==1.0.0; python_version >= "3.6"
kiwisolver==1.3.1; python_version >= "3.7"
lxml==4.6.3; python_version >= "2.7" and python_full_version < "3.0.0" or python_full_version >= "3.5.0"
markupsafe==2.0.1; python_version >= "3.7"
matplotlib-inline==0.1.2; python_version >= "3.7"
matplotlib==3.4.3; python_version >= "3.7"
mistune==0.8.4; python_version >= "3.7"
munch==2.5.0; python_version >= "3.6"
nbclient==0.5.3; python_full_version >= "3.6.1" and python_version >= "3.7"
nbconvert==6.1.0; python_version >= "3.7"
nbformat==5.1.3; python_full_version >= "3.6.1" and python_version >= "3.7"
nest-asyncio==1.5.1; python_full_version >= "3.6.1" and python_version >= "3.7"
-notebook==6.4.2; python_version >= "3.6"
+notebook==6.4.3; python_version >= "3.6"
numpy==1.21.1; python_version >= "3.7"
packaging==21.0; python_version >= "3.7"
pandas==1.3.1; python_full_version >= "3.7.1"
@@ -57,6 +61,7 @@ pandocfilters==1.4.3; python_version >= "3.7" and python_full_version < "3.0.0"
parso==0.8.2; python_version >= "3.7"
pexpect==4.8.0; sys_platform != "win32" and python_version >= "3.7"
pickleshare==0.7.5; python_version >= "3.7"
pillow==8.3.1; python_version >= "3.7"
prometheus-client==0.11.0; python_version >= "3.6" and python_full_version < "3.0.0" or python_full_version >= "3.4.0" and python_version >= "3.6"
prompt-toolkit==3.0.19; python_full_version >= "3.6.1" and python_version >= "3.7"
ptyprocess==0.7.0; sys_platform != "win32" and python_version >= "3.7" and os_name != "nt"
@@ -78,8 +83,8 @@ qtpy==1.9.0; python_version >= "3.6"
requests==2.26.0; (python_version >= "2.7" and python_full_version < "3.0.0") or (python_full_version >= "3.6.0")
send2trash==1.8.0; python_version >= "3.6"
shapely==1.7.1; python_version >= "3.6"
-six==1.16.0; python_full_version >= "3.7.1" and python_version >= "3.6" and (python_version >= "3.6" and python_full_version < "3.0.0" or python_full_version >= "3.3.0" and python_version >= "3.6") and (python_version >= "3.7" and python_full_version < "3.0.0" or python_full_version >= "3.3.0" and python_version >= "3.7") and (python_version >= "3.5" and python_full_version < "3.0.0" or python_full_version >= "3.3.0" and python_version >= "3.5")
+six==1.16.0; python_version >= "3.7" and python_full_version < "3.0.0" or python_full_version >= "3.3.0" and python_version >= "3.7"
-terminado==0.10.1; python_version >= "3.6"
+terminado==0.11.0; python_version >= "3.6"
testpath==0.5.0; python_version >= "3.7"
tornado==6.1; python_full_version >= "3.6.1" and python_version >= "3.7"
tqdm==4.62.0; (python_version >= "2.7" and python_full_version < "3.0.0") or (python_full_version >= "3.4.0")