Mirror of https://github.com/DOI-DO/j40-cejst-2.git (synced 2025-02-23 01:54:18 -08:00)
Merge branch 'main' into vimusds/release/frontend-narwhal-2

commit 3bed8ef70b
9 changed files with 725 additions and 1230 deletions

@@ -16,7 +16,7 @@ GATSBY_FILE_DL_PATH_TSD_PDF=downloadable/cejst_technical_support_document.pdf
 GATSBY_FILE_DL_PATH_TSD_ES_PDF=downloadable/cejst-technical-support-document-es.pdf
 GATSBY_FILE_DL_PATH_COMMUNITIES_LIST_XLS=downloadable/communities-2022-05-12-1914GMT.xlsx
 GATSBY_FILE_DL_PATH_COMMUNITIES_LIST_CSV=downloadable/communities-2022-05-12-1914GMT.csv
-GATSBY_FILE_DL_PATH_HOW_TO_COMMUNITIES_PDF=downloadable/Draft_Communities_List.pdf
+GATSBY_FILE_DL_PATH_HOW_TO_COMMUNITIES_PDF=downloadable/draft_communities_list.pdf

 GATSBY_MAP_TILES_PATH=tiles

@@ -1,3 +1,4 @@
+import os
 from pathlib import Path
 import datetime

@@ -44,22 +45,36 @@ DATA_SCORE_JSON_INDEX_FILE_PATH = (
 DATA_SCORE_TILES_DIR = DATA_SCORE_DIR / "tiles"

 # Downloadable paths
-current_dt = datetime.datetime.now()
-timestamp_str = current_dt.strftime("%Y-%m-%d-%H%MGMT")
+if not os.environ.get("J40_VERSION_DATE_STRING"):
+    current_dt = datetime.datetime.now()
+    timestamp_str = current_dt.strftime("%Y-%m-%d-%H%MGMT")
+else:
+    timestamp_str = os.environ.get("J40_VERSION_DATE_STRING")
+
+if not os.environ.get("J40_VERSION_LABEL_STRING"):
+    version_str = "beta"
+else:
+    version_str = os.environ.get("J40_VERSION_LABEL_STRING")

 SCORE_DOWNLOADABLE_DIR = DATA_SCORE_DIR / "downloadable"
-SCORE_DOWNLOADABLE_PDF_FILE_NAME = "Draft_Communities_List.pdf"
+SCORE_DOWNLOADABLE_PDF_FILE_NAME = "draft_communities_list.pdf"
 SCORE_DOWNLOADABLE_PDF_FILE_PATH = FILES_PATH / SCORE_DOWNLOADABLE_PDF_FILE_NAME
 SCORE_DOWNLOADABLE_CSV_FILE_PATH = (
-    SCORE_DOWNLOADABLE_DIR / f"communities-{timestamp_str}.csv"
+    SCORE_DOWNLOADABLE_DIR / f"{version_str}-communities-{timestamp_str}.csv"
 )
 SCORE_DOWNLOADABLE_EXCEL_FILE_PATH = (
-    SCORE_DOWNLOADABLE_DIR / f"communities-{timestamp_str}.xlsx"
+    SCORE_DOWNLOADABLE_DIR / f"{version_str}-communities-{timestamp_str}.xlsx"
 )
 SCORE_DOWNLOADABLE_CODEBOOK_FILE_PATH = (
-    SCORE_DOWNLOADABLE_DIR / f"codebook-{timestamp_str}.csv"
+    SCORE_DOWNLOADABLE_DIR / f"{version_str}-codebook-{timestamp_str}.csv"
 )
-SCORE_DOWNLOADABLE_ZIP_FILE_PATH = (
-    SCORE_DOWNLOADABLE_DIR / "Screening_Tool_Data.zip"
+SCORE_DOWNLOADABLE_CSV_ZIP_FILE_PATH = (
+    SCORE_DOWNLOADABLE_DIR
+    / f"{version_str}-communities-csv-{timestamp_str}.zip"
+)
+SCORE_DOWNLOADABLE_XLS_ZIP_FILE_PATH = (
+    SCORE_DOWNLOADABLE_DIR
+    / f"{version_str}-communities-xls-{timestamp_str}.zip"
 )

 # For the codebook

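For context, a minimal standalone sketch of how the two new environment variables drive the generated file names (the downloadable directory below is a placeholder, not the real constant; the "or" fallback reproduces the if/else above for unset or empty variables):

    import datetime
    import os
    from pathlib import Path

    # Unset (or empty) variables fall back to a GMT-style timestamp and the "beta" label.
    timestamp_str = os.environ.get("J40_VERSION_DATE_STRING") or datetime.datetime.now().strftime(
        "%Y-%m-%d-%H%MGMT"
    )
    version_str = os.environ.get("J40_VERSION_LABEL_STRING") or "beta"

    downloadable_dir = Path("data/score/downloadable")  # placeholder directory
    csv_path = downloadable_dir / f"{version_str}-communities-{timestamp_str}.csv"
    print(csv_path)  # e.g. data/score/downloadable/beta-communities-2022-05-12-1914GMT.csv
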
@@ -527,8 +527,8 @@ class PostScoreETL(ExtractTransformLoad):
         csv_path = constants.SCORE_DOWNLOADABLE_CSV_FILE_PATH
         excel_path = constants.SCORE_DOWNLOADABLE_EXCEL_FILE_PATH
         codebook_path = constants.SCORE_DOWNLOADABLE_CODEBOOK_FILE_PATH
-        zip_path = constants.SCORE_DOWNLOADABLE_ZIP_FILE_PATH
         pdf_path = constants.SCORE_DOWNLOADABLE_PDF_FILE_PATH
+        csv_zip_path = constants.SCORE_DOWNLOADABLE_CSV_ZIP_FILE_PATH
+        xls_zip_path = constants.SCORE_DOWNLOADABLE_XLS_ZIP_FILE_PATH

         logger.info("Writing downloadable excel")
         excel_config = self._load_excel_from_df(

@@ -577,14 +577,19 @@ class PostScoreETL(ExtractTransformLoad):
         # load codebook to disk
         codebook_df.to_csv(codebook_path, index=False)

-        logger.info("Compressing files")
+        logger.info("Compressing csv files")
         files_to_compress = [
             csv_path,
+            codebook_path,
+        ]
+        zip_files(csv_zip_path, files_to_compress)
+
+        logger.info("Compressing xls files")
+        files_to_compress = [
             excel_path,
             codebook_path,
             pdf_path,
         ]
-        zip_files(zip_path, files_to_compress)
+        zip_files(xls_zip_path, files_to_compress)

     def load(self) -> None:
         self._load_score_csv_full(

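The zip_files helper is only visible through its call sites above; a rough stand-in under that assumption (signature inferred from zip_files(csv_zip_path, files_to_compress), implementation hypothetical) could look like:

    import zipfile
    from pathlib import Path
    from typing import List

    def zip_files(zip_file_path: Path, files_to_compress: List[Path]) -> None:
        # Hypothetical stand-in for the pipeline's zip_files utility; only the
        # call signature is taken from the diff above.
        with zipfile.ZipFile(zip_file_path, "w", zipfile.ZIP_DEFLATED) as zf:
            for path in files_to_compress:
                # Store each file by its name only, without parent directories.
                zf.write(path, arcname=path.name)
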
@@ -73,7 +73,7 @@ def score_data_initial(sample_data_dir):

 @pytest.fixture()
 def score_pdf_initial(sample_data_dir):
-    return sample_data_dir / "Draft_Communities_List.pdf"
+    return sample_data_dir / "draft_communities_list.pdf"


 @pytest.fixture()

@@ -143,4 +143,5 @@ def test_load_downloadable_zip(etl, monkeypatch, score_data_expected):
     assert constants.SCORE_DOWNLOADABLE_DIR.is_dir()
     assert constants.SCORE_DOWNLOADABLE_CSV_FILE_PATH.is_file()
     assert constants.SCORE_DOWNLOADABLE_EXCEL_FILE_PATH.is_file()
-    assert constants.SCORE_DOWNLOADABLE_ZIP_FILE_PATH.is_file()
+    assert constants.SCORE_DOWNLOADABLE_CSV_ZIP_FILE_PATH.is_file()
+    assert constants.SCORE_DOWNLOADABLE_XLS_ZIP_FILE_PATH.is_file()

@@ -119,7 +119,6 @@ class TestETL:
         """
         # Setup
         etl = self._get_instance_of_etl_class()
-        etl.__init__()
         data_path, tmp_path = mock_paths

         assert etl.DATA_PATH == data_path

@@ -147,7 +146,6 @@ class TestETL:
         etl = self._get_instance_of_etl_class()
         data_path, tmp_path = mock_paths

-        etl.__init__()
         actual_file_path = etl._get_output_file_path()

         expected_file_path = data_path / "dataset" / etl.NAME / "usa.csv"

@@ -250,7 +248,6 @@ class TestETL:
         etl = self._setup_etl_instance_and_run_extract(
             mock_etl=mock_etl, mock_paths=mock_paths
         )
-        etl.__init__()
         etl.transform()

         assert etl.output_df is not None

@@ -268,7 +265,6 @@ class TestETL:
         """
         # setup - input variables
         etl = self._get_instance_of_etl_class()
-        etl.__init__()

         # setup - mock transform step
         df_transform = pd.read_csv(

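The four hunks above drop explicit etl.__init__() calls, presumably because the constructor already runs at instantiation; a small illustration (stand-in class, not pipeline code) of why the extra call added nothing:

    class ExampleETL:
        # Stand-in for the ETL classes used in the tests above.
        def __init__(self) -> None:
            self.output_df = None  # fresh state on every construction

    etl = ExampleETL()  # __init__ has already run here
    etl.__init__()      # the removed pattern: re-runs the same setup, no effect beyond it
    assert etl.output_df is None
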
data/data-pipeline/poetry.lock (generated, 1892 changed lines): file diff suppressed because it is too large.

@@ -19,7 +19,7 @@ packages = [
 CensusData = "^1.13"
 click = "8.0.4" # pinning for now per https://github.com/psf/black/issues/2964
 dynaconf = "^3.1.4"
-geopandas = "^0.9.0"
+geopandas = "^0.11.0"
 ipdb = "^0.13.9"
 ipython = "^7.31.1"
 jupyter = "^1.0.0"

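An optional runtime check (assuming geopandas is importable in the pipeline environment) that the installed version satisfies the bumped ^0.11.0 constraint:

    import geopandas

    major, minor, *_ = geopandas.__version__.split(".")
    assert (int(major), int(minor)) >= (0, 11), geopandas.__version__
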
@@ -39,6 +39,8 @@ tqdm = "4.62.0"
 types-requests = "^2.25.0"
 us = "^2.0.2"
 xlsxwriter = "^2.0.0"
+pydantic = "^1.9.0"
+Rtree = "^1.0.0"

 [tool.poetry.dev-dependencies]
 black = {version = "^21.6b0", allow-prereleases = true}

@@ -57,6 +59,7 @@ pytest-snapshot = "^0.8.1"
 nb-black = "^1.0.7"
 seaborn = "^0.11.2"
 papermill = "^2.3.4"
+jupyterlab = "^3.4.4"

 [build-system]
 build-backend = "poetry.core.masonry.api"

@@ -67,7 +70,6 @@ requires = ["poetry-core>=1.0.0"]
 [tool.pylint."MESSAGE CONTROL"]
 disable = [
     "C0114", # Disables module docstrings
     "R0201", # Disables method could have been a function
     "R0903", # Disables too few public methods
     "C0103", # Disables name case styling
     "W0511", # Disables FIXME warning