From 6c86dd7e4e4ce90747a556ce8b413efe736bc290 Mon Sep 17 00:00:00 2001
From: Nat Hillard
Date: Wed, 4 Aug 2021 21:13:56 -0400
Subject: [PATCH] Fixes for flake8

---
 .../data-pipeline/data_pipeline/etl/runner.py | 15 ++++++++++---
 .../data_pipeline/etl/sources/census/etl.py   | 22 ++++++++++++++-----
 .../data_pipeline/tile/generate.py            |  8 +++----
 data/data-pipeline/tox.ini                    |  4 ++--
 4 files changed, 35 insertions(+), 14 deletions(-)

diff --git a/data/data-pipeline/data_pipeline/etl/runner.py b/data/data-pipeline/data_pipeline/etl/runner.py
index 7e5f2449..093012de 100644
--- a/data/data-pipeline/data_pipeline/etl/runner.py
+++ b/data/data-pipeline/data_pipeline/etl/runner.py
@@ -27,7 +27,11 @@ def etl_runner(dataset_to_run: str = None) -> None:
             "module_dir": "census_acs",
             "class_name": "CensusACSETL",
         },
-        {"name": "ejscreen", "module_dir": "ejscreen", "class_name": "EJScreenETL",},
+        {
+            "name": "ejscreen",
+            "module_dir": "ejscreen",
+            "class_name": "EJScreenETL",
+        },
         {
             "name": "housing_and_transportation",
             "module_dir": "housing_and_transportation",
@@ -43,12 +47,17 @@ def etl_runner(dataset_to_run: str = None) -> None:
             "module_dir": "calenviroscreen",
             "class_name": "CalEnviroScreenETL",
         },
-        {"name": "hud_recap", "module_dir": "hud_recap", "class_name": "HudRecapETL",},
+        {
+            "name": "hud_recap",
+            "module_dir": "hud_recap",
+            "class_name": "HudRecapETL",
+        },
     ]
 
     if dataset_to_run:
         dataset_element = next(
-            (item for item in dataset_list if item["name"] == dataset_to_run), None,
+            (item for item in dataset_list if item["name"] == dataset_to_run),
+            None,
         )
         if not dataset_list:
             raise ValueError("Invalid dataset name")
diff --git a/data/data-pipeline/data_pipeline/etl/sources/census/etl.py b/data/data-pipeline/data_pipeline/etl/sources/census/etl.py
index 9dae7beb..5e70dbd9 100644
--- a/data/data-pipeline/data_pipeline/etl/sources/census/etl.py
+++ b/data/data-pipeline/data_pipeline/etl/sources/census/etl.py
@@ -39,7 +39,9 @@ def download_census_csvs(data_path: Path) -> None:
         # But using 2010 for now
         cbg_state_url = f"https://www2.census.gov/geo/tiger/TIGER2010/BG/2010/tl_2010_{fips}_bg10.zip"
         unzip_file_from_url(
-            cbg_state_url, data_path / "tmp", data_path / "census" / "shp" / fips,
+            cbg_state_url,
+            data_path / "tmp",
+            data_path / "census" / "shp" / fips,
         )
 
         cmd = (
@@ -78,22 +80,32 @@ def download_census_csvs(data_path: Path) -> None:
             csv_dir_path / f"{state_id}.csv", mode="w", newline=""
         ) as cbg_csv_file:
             cbg_csv_file_writer = csv.writer(
-                cbg_csv_file, delimiter=",", quotechar='"', quoting=csv.QUOTE_MINIMAL,
+                cbg_csv_file,
+                delimiter=",",
+                quotechar='"',
+                quoting=csv.QUOTE_MINIMAL,
             )
 
             for geoid10 in geoid10_list:
                 cbg_csv_file_writer.writerow(
-                    [geoid10,]
+                    [
+                        geoid10,
+                    ]
                 )
 
     ## write US csv
     with open(csv_dir_path / "us.csv", mode="w", newline="") as cbg_csv_file:
         cbg_csv_file_writer = csv.writer(
-            cbg_csv_file, delimiter=",", quotechar='"', quoting=csv.QUOTE_MINIMAL,
+            cbg_csv_file,
+            delimiter=",",
+            quotechar='"',
+            quoting=csv.QUOTE_MINIMAL,
         )
         for geoid10 in cbg_national:
             cbg_csv_file_writer.writerow(
-                [geoid10,]
+                [
+                    geoid10,
+                ]
             )
 
     ## create national geojson
diff --git a/data/data-pipeline/data_pipeline/tile/generate.py b/data/data-pipeline/data_pipeline/tile/generate.py
index 4710bc8d..02dda601 100644
--- a/data/data-pipeline/data_pipeline/tile/generate.py
+++ b/data/data-pipeline/data_pipeline/tile/generate.py
@@ -27,7 +27,7 @@ def generate_tiles(data_path: Path) -> None:
         os.mkdir(low_tile_path)
 
     # generate high mbtiles file
-    logger.info(f"Generating USA High mbtiles file")
+    logger.info("Generating USA High mbtiles file")
     cmd = "tippecanoe "
     cmd += f"--minimum-zoom={USA_HIGH_MIN_ZOOM} --maximum-zoom={USA_HIGH_MAX_ZOOM} --layer=blocks "
     cmd += f"--output={high_tile_path}/usa_high.mbtiles "
@@ -35,7 +35,7 @@ def generate_tiles(data_path: Path) -> None:
     call(cmd, shell=True)
 
     # generate high mvts
-    logger.info(f"Generating USA High mvt folders and files")
+    logger.info("Generating USA High mvt folders and files")
     cmd = "tippecanoe "
     cmd += f"--minimum-zoom={USA_HIGH_MIN_ZOOM} --maximum-zoom={USA_HIGH_MAX_ZOOM} --no-tile-compression "
     cmd += f"--output-to-directory={high_tile_path} "
@@ -43,7 +43,7 @@ def generate_tiles(data_path: Path) -> None:
     call(cmd, shell=True)
 
     # generate low mbtiles file
-    logger.info(f"Generating USA Low mbtiles file")
+    logger.info("Generating USA Low mbtiles file")
     cmd = "tippecanoe "
     cmd += f"--minimum-zoom={USA_LOW_MIN_ZOOM} --maximum-zoom={USA_LOW_MAX_ZOOM} --layer=blocks "
     cmd += f"--output={low_tile_path}/usa_low.mbtiles "
@@ -51,7 +51,7 @@ def generate_tiles(data_path: Path) -> None:
     call(cmd, shell=True)
 
     # generate low mvts
-    logger.info(f"Generating USA Low mvt folders and files")
+    logger.info("Generating USA Low mvt folders and files")
     cmd = "tippecanoe "
     cmd += f"--minimum-zoom={USA_LOW_MIN_ZOOM} --maximum-zoom={USA_LOW_MAX_ZOOM} --no-tile-compression "
     cmd += f"--output-to-directory={low_tile_path} "
diff --git a/data/data-pipeline/tox.ini b/data/data-pipeline/tox.ini
index 85a39d77..eb509f5f 100644
--- a/data/data-pipeline/tox.ini
+++ b/data/data-pipeline/tox.ini
@@ -9,8 +9,8 @@ skip_missing_interpreters = true
 # lints python code in src and tests
 basepython = python3.9
 deps = -rrequirements.txt
-commands = black etl application.py config.py utils.py
-           flake8 etl application.py config.py utils.py
+commands = black data_pipeline
+           flake8 data_pipeline
 # pylint etl application.py config.py utils.py
 
 [testenv:checkdeps]