Fixes for flake8

Nat Hillard 2021-08-04 21:13:56 -04:00
parent 508925618b
commit 6c86dd7e4e
4 changed files with 35 additions and 14 deletions


@@ -27,7 +27,11 @@ def etl_runner(dataset_to_run: str = None) -> None:
             "module_dir": "census_acs",
             "class_name": "CensusACSETL",
         },
-        {"name": "ejscreen", "module_dir": "ejscreen", "class_name": "EJScreenETL",},
+        {
+            "name": "ejscreen",
+            "module_dir": "ejscreen",
+            "class_name": "EJScreenETL",
+        },
         {
             "name": "housing_and_transportation",
             "module_dir": "housing_and_transportation",
@@ -43,12 +47,17 @@ def etl_runner(dataset_to_run: str = None) -> None:
             "module_dir": "calenviroscreen",
             "class_name": "CalEnviroScreenETL",
         },
-        {"name": "hud_recap", "module_dir": "hud_recap", "class_name": "HudRecapETL",},
+        {
+            "name": "hud_recap",
+            "module_dir": "hud_recap",
+            "class_name": "HudRecapETL",
+        },
     ]
     if dataset_to_run:
         dataset_element = next(
-            (item for item in dataset_list if item["name"] == dataset_to_run), None,
+            (item for item in dataset_list if item["name"] == dataset_to_run),
+            None,
         )
         if not dataset_list:
             raise ValueError("Invalid dataset name")

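The hunks above reformat entries in etl_runner's dataset registry, where each dataset is described by a name, a module_dir, and a class_name, and dataset_to_run is resolved with a next() lookup. As a rough illustration of how such an entry could be turned into an ETL class, here is a minimal sketch; the etl.sources package path and the run() entry point are assumptions for the example, not taken from this commit:

    import importlib

    def run_dataset(dataset_list: list, dataset_to_run: str) -> None:
        # Look up the requested dataset by name, as in the diff above.
        dataset_element = next(
            (item for item in dataset_list if item["name"] == dataset_to_run),
            None,
        )
        if dataset_element is None:
            raise ValueError("Invalid dataset name")

        # Resolve "module_dir" / "class_name" to a concrete ETL class.
        # The package path and run() method are assumed for illustration.
        module = importlib.import_module(
            f"etl.sources.{dataset_element['module_dir']}.etl"
        )
        etl_class = getattr(module, dataset_element["class_name"])
        etl_class().run()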

@@ -39,7 +39,9 @@ def download_census_csvs(data_path: Path) -> None:
             # But using 2010 for now
             cbg_state_url = f"https://www2.census.gov/geo/tiger/TIGER2010/BG/2010/tl_2010_{fips}_bg10.zip"
             unzip_file_from_url(
-                cbg_state_url, data_path / "tmp", data_path / "census" / "shp" / fips,
+                cbg_state_url,
+                data_path / "tmp",
+                data_path / "census" / "shp" / fips,
             )
             cmd = (
@@ -78,22 +80,32 @@ def download_census_csvs(data_path: Path) -> None:
             csv_dir_path / f"{state_id}.csv", mode="w", newline=""
         ) as cbg_csv_file:
             cbg_csv_file_writer = csv.writer(
-                cbg_csv_file, delimiter=",", quotechar='"', quoting=csv.QUOTE_MINIMAL,
+                cbg_csv_file,
+                delimiter=",",
+                quotechar='"',
+                quoting=csv.QUOTE_MINIMAL,
             )
             for geoid10 in geoid10_list:
                 cbg_csv_file_writer.writerow(
-                    [geoid10,]
+                    [
+                        geoid10,
+                    ]
                 )
     ## write US csv
     with open(csv_dir_path / "us.csv", mode="w", newline="") as cbg_csv_file:
         cbg_csv_file_writer = csv.writer(
-            cbg_csv_file, delimiter=",", quotechar='"', quoting=csv.QUOTE_MINIMAL,
+            cbg_csv_file,
+            delimiter=",",
+            quotechar='"',
+            quoting=csv.QUOTE_MINIMAL,
         )
         for geoid10 in cbg_national:
             cbg_csv_file_writer.writerow(
-                [geoid10,]
+                [
+                    geoid10,
+                ]
             )
     ## create national geojson

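For reference, the CSV writes being reformatted above follow the standard csv.writer pattern of one GEOID per row. A self-contained sketch of that pattern, using made-up GEOIDs and a temporary output path rather than the project's real data:

    import csv
    from pathlib import Path

    # Hypothetical sample data and output location for the example only.
    geoid10_list = ["010010201001", "010010201002"]
    csv_dir_path = Path("/tmp/census_csv")
    csv_dir_path.mkdir(parents=True, exist_ok=True)

    with open(csv_dir_path / "01.csv", mode="w", newline="") as cbg_csv_file:
        cbg_csv_file_writer = csv.writer(
            cbg_csv_file,
            delimiter=",",
            quotechar='"',
            quoting=csv.QUOTE_MINIMAL,
        )
        for geoid10 in geoid10_list:
            # one census block group id per row
            cbg_csv_file_writer.writerow([geoid10])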

@@ -27,7 +27,7 @@ def generate_tiles(data_path: Path) -> None:
     os.mkdir(low_tile_path)
     # generate high mbtiles file
-    logger.info(f"Generating USA High mbtiles file")
+    logger.info("Generating USA High mbtiles file")
     cmd = "tippecanoe "
     cmd += f"--minimum-zoom={USA_HIGH_MIN_ZOOM} --maximum-zoom={USA_HIGH_MAX_ZOOM} --layer=blocks "
     cmd += f"--output={high_tile_path}/usa_high.mbtiles "
@@ -35,7 +35,7 @@ def generate_tiles(data_path: Path) -> None:
     call(cmd, shell=True)
     # generate high mvts
-    logger.info(f"Generating USA High mvt folders and files")
+    logger.info("Generating USA High mvt folders and files")
     cmd = "tippecanoe "
     cmd += f"--minimum-zoom={USA_HIGH_MIN_ZOOM} --maximum-zoom={USA_HIGH_MAX_ZOOM} --no-tile-compression "
     cmd += f"--output-to-directory={high_tile_path} "
@@ -43,7 +43,7 @@ def generate_tiles(data_path: Path) -> None:
     call(cmd, shell=True)
     # generate low mbtiles file
-    logger.info(f"Generating USA Low mbtiles file")
+    logger.info("Generating USA Low mbtiles file")
     cmd = "tippecanoe "
     cmd += f"--minimum-zoom={USA_LOW_MIN_ZOOM} --maximum-zoom={USA_LOW_MAX_ZOOM} --layer=blocks "
     cmd += f"--output={low_tile_path}/usa_low.mbtiles "
@@ -51,7 +51,7 @@ def generate_tiles(data_path: Path) -> None:
     call(cmd, shell=True)
     # generate low mvts
-    logger.info(f"Generating USA Low mvt folders and files")
+    logger.info("Generating USA Low mvt folders and files")
     cmd = "tippecanoe "
     cmd += f"--minimum-zoom={USA_LOW_MIN_ZOOM} --maximum-zoom={USA_LOW_MAX_ZOOM} --no-tile-compression "
     cmd += f"--output-to-directory={low_tile_path} "

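The generate_tiles hunks above build tippecanoe command strings and run them with subprocess.call. A minimal sketch of that pattern; the zoom constants and paths below are placeholders, not the project's values:

    from subprocess import call

    # Placeholder values standing in for the project's constants and paths.
    USA_HIGH_MIN_ZOOM = 5
    USA_HIGH_MAX_ZOOM = 11
    high_tile_path = "/tmp/tiles/high"
    input_geojson = "/tmp/geojson/usa-high.json"

    # Build the tippecanoe invocation the same way the diff does, then shell out.
    cmd = "tippecanoe "
    cmd += f"--minimum-zoom={USA_HIGH_MIN_ZOOM} --maximum-zoom={USA_HIGH_MAX_ZOOM} --layer=blocks "
    cmd += f"--output={high_tile_path}/usa_high.mbtiles "
    cmd += input_geojson
    call(cmd, shell=True)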

@@ -9,8 +9,8 @@ skip_missing_interpreters = true
 # lints python code in src and tests
 basepython = python3.9
 deps = -rrequirements.txt
-commands = black etl application.py config.py utils.py
-    flake8 etl application.py config.py utils.py
+commands = black data_pipeline
+    flake8 data_pipeline
 # pylint etl application.py config.py utils.py
 [testenv:checkdeps]