diff --git a/Makefile b/Makefile
index a16208aac..df5db691d 100644
--- a/Makefile
+++ b/Makefile
@@ -58,7 +58,13 @@ test-gcs:
 	sh ./dev/run-gcs-server.sh
 	poetry run pytest tests/ -m gcs ${PYTEST_ARGS}
 
-test-coverage:
+test-coverage-unit:
+	poetry run coverage run --source=pyiceberg/ -m pytest tests/ -m "(unmarked or parametrize) and not integration" ${PYTEST_ARGS}
+	poetry run coverage report -m --fail-under=80
+	poetry run coverage html
+	poetry run coverage xml
+
+test-coverage-integration:
 	docker compose -f dev/docker-compose-integration.yml kill
 	docker compose -f dev/docker-compose-integration.yml rm -f
 	docker compose -f dev/docker-compose-integration.yml up -d
@@ -67,11 +73,13 @@ test-coverage:
 	sleep 10
 	docker compose -f dev/docker-compose-integration.yml cp ./dev/provision.py spark-iceberg:/opt/spark/provision.py
 	docker compose -f dev/docker-compose-integration.yml exec -T spark-iceberg ipython ./provision.py
-	poetry run coverage run --source=pyiceberg/ -m pytest tests/ ${PYTEST_ARGS}
+	poetry run coverage run --source=pyiceberg/ -m pytest tests/ -m integration ${PYTEST_ARGS}
 	poetry run coverage report -m --fail-under=90
 	poetry run coverage html
 	poetry run coverage xml
 
+test-coverage: | test-coverage-unit test-coverage-integration
+
 
 clean:
 	@echo "Cleaning up Cython and Python cached files"
diff --git a/tests/integration/test_reads.py b/tests/integration/test_reads.py
index cbfd64e19..3cf17c0e8 100644
--- a/tests/integration/test_reads.py
+++ b/tests/integration/test_reads.py
@@ -753,6 +753,7 @@ def test_configure_row_group_batch_size(session_catalog: Catalog) -> None:
     assert len(batches) == entries
 
 
+@pytest.mark.integration
 @pytest.mark.parametrize("catalog", [pytest.lazy_fixture("session_catalog_hive"), pytest.lazy_fixture("session_catalog")])
 def test_table_scan_default_to_large_types(catalog: Catalog) -> None:
     identifier = "default.test_table_scan_default_to_large_types"