Add pytest to tox run in CI/CD (#713)

* Add pytest to tox run in CI/CD

* Try fixing tox dependencies for pytest

* Update Poetry to get CI/CD passing

* Run poetry export with the --dev flag to include dev dependencies such as pytest

* WIP updating test fixtures to include PDF

* Remove dev dependencies from requirements.txt and add pytest to the tox envlist to make the build faster

* Get score_post tests passing

* Add pytest to tox (#729)

* Fix failing pytest

* Fix failing tox tests and update requirements.txt to include dev dependencies

* Use pickle protocol 4 (see the note below)

Co-authored-by: Shelby Switzer <shelby.switzer@cms.hhs.gov>
Co-authored-by: Jorge Escobar <jorge.e.escobar@omb.eop.gov>
Co-authored-by: Billy Daly <williamdaly422@gmail.com>
Co-authored-by: Jorge Escobar <83969469+esfoobar-usds@users.noreply.github.com>
Shelby Switzer authored on 2021-09-22 13:47:37 -04:00; committed via GitHub
Commit d3a18352fc
17 changed files with 112 additions and 64 deletions
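
A note on the CI/CD changes summarized above: the tox run now includes a pytest environment, and since pytest is a development-only dependency it has to be made available to that environment; with Poetry this is typically done by exporting dev dependencies into the requirements file (for example, poetry export -f requirements.txt --output requirements.txt --dev), though the exact command used by this project is not shown in the diffs below. The "pickle protocol 4" message presumably refers to pinning the pickle protocol so pickled test data stays readable across the Python versions CI exercises: protocol 4 can be read on Python 3.4 and later, while the default on Python 3.8+ is protocol 5, which older interpreters cannot load. A minimal sketch of that idea, with an illustrative file name and data rather than the project's actual call site:

import pandas as pd

# Illustrative fixture data; the project's real call site is not shown in this diff.
# Writing with an explicit, older pickle protocol keeps the file readable on any
# Python version the CI matrix might run.
df = pd.DataFrame({"GEOID10": ["010010201001"]})
df.to_pickle("score_fixture.pkl", protocol=4)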


@@ -19,6 +19,7 @@ class ExtractTransformLoad:
    DATA_PATH: Path = settings.APP_ROOT / "data"
    TMP_PATH: Path = DATA_PATH / "tmp"
    FILES_PATH: Path = settings.APP_ROOT / "files"
    GEOID_FIELD_NAME: str = "GEOID10"
    GEOID_TRACT_FIELD_NAME: str = "GEOID10_TRACT"
    # TODO: investigate. Census says there are only 217,740 CBGs in the US.


@@ -43,7 +43,8 @@ DATA_SCORE_TILES_DIR = DATA_SCORE_DIR / "tiles"
SCORE_DOWNLOADABLE_DIR = DATA_SCORE_DIR / "downloadable"
SCORE_DOWNLOADABLE_CSV_FILE_PATH = SCORE_DOWNLOADABLE_DIR / "usa.csv"
SCORE_DOWNLOADABLE_EXCEL_FILE_PATH = SCORE_DOWNLOADABLE_DIR / "usa.xlsx"
-SCORE_DOWNLOADABLE_PDF_FILE_PATH = FILES_PATH / "Draft_Communities_List.pdf"
+SCORE_DOWNLOADABLE_PDF_FILE_NAME = "Draft_Communities_List.pdf"
+SCORE_DOWNLOADABLE_PDF_FILE_PATH = FILES_PATH / SCORE_DOWNLOADABLE_PDF_FILE_NAME
SCORE_DOWNLOADABLE_ZIP_FILE_PATH = (
    SCORE_DOWNLOADABLE_DIR / "Screening_Tool_Data.zip"
)


@@ -41,6 +41,7 @@ def etl(monkeypatch, root):
    etl = PostScoreETL()
    monkeypatch.setattr(etl, "DATA_PATH", root)
    monkeypatch.setattr(etl, "TMP_PATH", tmp_path)
    return etl
@@ -65,6 +66,11 @@ def score_data_initial(sample_data_dir):
    return sample_data_dir / "score_data_initial.csv"

@pytest.fixture()
def score_pdf_initial(sample_data_dir):
    return sample_data_dir / "Draft_Communities_List.pdf"

@pytest.fixture()
def counties_transformed_expected():
    return pd.DataFrame.from_dict(

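The new score_pdf_initial fixture above simply returns the path to the sample PDF; pytest hands that path to any test that lists the fixture name as a parameter. A minimal, hypothetical consumer (not part of this commit) would look like:

def test_score_pdf_fixture_points_at_sample(score_pdf_initial):
    # Hypothetical test, not from this diff: pytest injects the fixture's
    # return value (a Path) by matching the parameter name against the
    # fixture defined in conftest.py.
    assert score_pdf_initial.name == "Draft_Communities_List.pdf"
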
File diff suppressed because one or more lines are too long


@@ -2,9 +2,10 @@
## Above disables warning about access to underscore-prefixed methods
from importlib import reload
from pathlib import Path
import pandas.api.types as ptypes
import pandas.testing as pdt
from data_pipeline.etl.score import constants
# See conftest.py for all fixtures used in these tests
@@ -117,8 +118,17 @@ def test_load_tile_csv(etl, tile_data_expected):
    assert constants.DATA_SCORE_CSV_TILES_FILE_PATH.is_file()

-def test_load_downloadable_zip(etl, downloadable_data_expected):
+def test_load_downloadable_zip(etl, monkeypatch, downloadable_data_expected):
    reload(constants)
    STATIC_FILES_PATH = (
        Path.cwd() / "data_pipeline" / "files"
    ) # need to monkeypatch to real dir
    monkeypatch.setattr(constants, "FILES_PATH", STATIC_FILES_PATH)
    monkeypatch.setattr(
        constants,
        "SCORE_DOWNLOADABLE_PDF_FILE_PATH",
        STATIC_FILES_PATH / constants.SCORE_DOWNLOADABLE_PDF_FILE_NAME,
    )
    etl._load_downloadable_zip(
        downloadable_data_expected, constants.SCORE_DOWNLOADABLE_DIR
    )
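
One note on the monkeypatching above: constants.py computes SCORE_DOWNLOADABLE_PDF_FILE_PATH from FILES_PATH once, when the module is evaluated, so patching FILES_PATH alone would not move the already-computed PDF path; the test therefore reloads the module to start from freshly evaluated constants and then patches both names. A tiny standalone illustration of why the derived value has to be patched separately (hypothetical names, not project code):

from pathlib import Path

# Hypothetical names, not project code.
BASE_DIR = Path("/tmp/original")
PDF_PATH = BASE_DIR / "report.pdf"  # computed once, at definition time

BASE_DIR = Path("/tmp/patched")  # rebinding the base afterwards...
print(PDF_PATH)  # ...still prints /tmp/original/report.pdf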