From 6841da72154202c9f759e7b3e2c424a51d7ae608 Mon Sep 17 00:00:00 2001 From: colin-rogers-dbt <111200756+colin-rogers-dbt@users.noreply.github.com> Date: Mon, 9 Jan 2023 15:49:52 -0800 Subject: [PATCH 01/16] fix whitespace issue with incremental sql (#458) * fix whitespace issue with incremental sql * add changie * add setuptools install * add twine and check-wheel-contents * try installing pkgconfig-lite * add --allow-empty-checksums * check pkg-config installs * install mac os pyicu dependencies * fix syntax issue * force agate 1.6.3 * add trailing comma --- .../unreleased/Fixes-20230109-105325.yaml | 7 +++ .github/workflows/main.yml | 3 +- .../incremental_strategy/merge.sql | 2 +- setup.py | 1 + .../test_incremental_materialization.py | 44 +++++++++++++++++++ 5 files changed, 54 insertions(+), 3 deletions(-) create mode 100644 .changes/unreleased/Fixes-20230109-105325.yaml create mode 100644 tests/functional/test_incremental_materialization.py diff --git a/.changes/unreleased/Fixes-20230109-105325.yaml b/.changes/unreleased/Fixes-20230109-105325.yaml new file mode 100644 index 000000000..2a16883fc --- /dev/null +++ b/.changes/unreleased/Fixes-20230109-105325.yaml @@ -0,0 +1,7 @@ +kind: Fixes +body: stop eliminating trailing whitespace in incremental merge sql +time: 2023-01-09T10:53:25.837837-08:00 +custom: + Author: colin-rogers-dbt + Issue: "457" + PR: "458" diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 0c7d51fa8..140557beb 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -182,11 +182,10 @@ jobs: uses: actions/setup-python@v4.3.0 with: python-version: ${{ matrix.python-version }} - - name: Install python dependencies run: | python -m pip install --user --upgrade pip - python -m pip install --upgrade wheel + python -m pip install --upgrade wheel setuptools twine check-wheel-contents python -m pip --version - uses: actions/download-artifact@v2 with: diff --git a/dbt/include/bigquery/macros/materializations/incremental_strategy/merge.sql b/dbt/include/bigquery/macros/materializations/incremental_strategy/merge.sql index 5efb079b6..57c88dbc8 100644 --- a/dbt/include/bigquery/macros/materializations/incremental_strategy/merge.sql +++ b/dbt/include/bigquery/macros/materializations/incremental_strategy/merge.sql @@ -16,7 +16,7 @@ {{ wrap_with_time_ingestion_partitioning_sql(build_partition_time_exp(partition_by), sql, True) }} {%- else -%} {{sql}} - {%- endif -%} + {%- endif %} ) {%- endif -%} {%- endset -%} diff --git a/setup.py b/setup.py index 07f501ce6..52defb47d 100644 --- a/setup.py +++ b/setup.py @@ -81,6 +81,7 @@ def _dbt_core_version(plugin_version: str) -> str: "googleapis-common-protos~=1.6", "google-cloud-storage~=2.4", "google-cloud-dataproc~=5.0", + "agate>=1.6.3,<1.7", ], zip_safe=False, classifiers=[ diff --git a/tests/functional/test_incremental_materialization.py b/tests/functional/test_incremental_materialization.py new file mode 100644 index 000000000..6932363b3 --- /dev/null +++ b/tests/functional/test_incremental_materialization.py @@ -0,0 +1,44 @@ +import pytest +import os +from dbt.tests.util import ( + run_dbt +) + +# This is a short term hack, we need to go back +# and make adapter implementations of: +# https://github.com/dbt-labs/dbt-core/pull/6330 + +_INCREMENTAL_MODEL = """ +{{ + config( + materialized="incremental", + ) +}} + +{% if not is_incremental() %} + + select 10 as id, cast('2020-01-01 01:00:00' as datetime) as date_hour union all + select 30 as id, cast('2020-01-01 02:00:00' as 
datetime) as date_hour

{% else %}

    select 20 as id, cast('2020-01-01 01:00:00' as datetime) as date_hour union all
    select 40 as id, cast('2020-01-01 02:00:00' as datetime) as date_hour

{% endif %}
-- Test Comment To Prevent Recurrence of https://github.com/dbt-labs/dbt-core/issues/6485
"""

class BaseIncrementalModelConfig:
    @pytest.fixture(scope="class")
    def models(self):
        return {
            "test_incremental.sql": _INCREMENTAL_MODEL
        }

class TestIncrementalModel(BaseIncrementalModelConfig):
    def test_incremental_model_succeeds(self, project):
        results = run_dbt(["run"])
        assert len(results) == 1
        results = run_dbt(["run"])
        assert len(results) == 1
\ No newline at end of file

From a5fcce91befc05949a3597e4bc84d526f6f1830e Mon Sep 17 00:00:00 2001
From: Matthew McKnight <91097623+McKnight-42@users.noreply.github.com>
Date: Tue, 10 Jan 2023 00:44:13 -0600
Subject: [PATCH 02/16] take over of PR #174 to get changelog in order to merge (#456)

* take over of PR #174 to get changelog in order to merge
* try and fix wheel
* trying to change order of operations for wheel install
* try manually installing PyICU
* reset to default
* test colin changes
* reset to default state

Co-authored-by: Halvor Lund
Co-authored-by: colin-rogers-dbt <111200756+colin-rogers-dbt@users.noreply.github.com>
---
 .changes/unreleased/Features-20230109-105921.yaml | 7 +++++++
 dbt/include/bigquery/macros/catalog.sql           | 10 ++++++++++
 2 files changed, 17 insertions(+)
 create mode 100644 .changes/unreleased/Features-20230109-105921.yaml

diff --git a/.changes/unreleased/Features-20230109-105921.yaml b/.changes/unreleased/Features-20230109-105921.yaml
new file mode 100644
index 000000000..0eaec0dcb
--- /dev/null
+++ b/.changes/unreleased/Features-20230109-105921.yaml
@@ -0,0 +1,7 @@
+kind: Features
+body: Fetch table description to `catalog.json`, so it can be shown in dbt docs
+time: 2023-01-09T10:59:21.213259-06:00
+custom:
+  Author: McKnight-42, halvorlu
+  Issue: "173"
+  PR: "174"
diff --git a/dbt/include/bigquery/macros/catalog.sql b/dbt/include/bigquery/macros/catalog.sql
index 6822d88a6..d430b80f8 100644
--- a/dbt/include/bigquery/macros/catalog.sql
+++ b/dbt/include/bigquery/macros/catalog.sql
@@ -35,6 +35,14 @@
         )
     ),

+    table_options as (
+        select
+            concat(table_catalog, '.', table_schema, '.', table_name) as relation_id,
+            JSON_VALUE(option_value) as table_comment
+
+        from {{ information_schema.replace(information_schema_view='TABLE_OPTIONS') }}
+        where option_name = 'description'
+    ),
     extracted as (

         select *,
@@ -146,6 +154,7 @@
                 else unsharded_tables.table_name
             end as table_name,
             unsharded_tables.table_type,
+            table_options.table_comment,

             -- coalesce name and type for External tables - these columns are not
             -- present in the COLUMN_FIELD_PATHS resultset
@@ -198,6 +207,7 @@
         -- sure that column metadata is picked up through the join. 
This will only -- return the column information for the "max" table in a date-sharded table set from unsharded_tables + left join table_options using (relation_id) left join columns using (relation_id) left join column_stats using (relation_id) {%- endset -%} From 2e06977a118e07c5d7ad242b83bac913f9ee05d7 Mon Sep 17 00:00:00 2001 From: colin-rogers-dbt <111200756+colin-rogers-dbt@users.noreply.github.com> Date: Tue, 10 Jan 2023 12:50:42 -0800 Subject: [PATCH 03/16] fix release.yml pypi steps (#461) --- .github/workflows/release.yml | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 91c3114e4..5fd0291e9 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -193,7 +193,7 @@ jobs: pypi-release: name: Pypi release # only release to PyPi if we're not testing - will release to PyPi test when workflow gets rewritten - if: inputs.test_run == 'false' + if: ${{ inputs.test_run == false }} runs-on: ubuntu-latest @@ -201,10 +201,11 @@ jobs: environment: PypiProd steps: - - uses: actions/download-artifact@v2 + - name: "Download Build Artifact - ${{ inputs.version_number }}" + uses: actions/download-artifact@v3 with: - name: dist - path: 'dist' + name: ${{ inputs.version_number }} + path: dist/ - name: Publish distribution to PyPI uses: pypa/gh-action-pypi-publish@v1.4.2 From 3ce88d75ac78b9aac1dcaa184c94e235f661a6f0 Mon Sep 17 00:00:00 2001 From: Emily Rockman Date: Tue, 10 Jan 2023 19:19:11 -0600 Subject: [PATCH 04/16] update exception names (#460) * update exception names * point back to main * Update Under the Hood-20230110-110016.yaml --- .../Under the Hood-20230110-110016.yaml | 7 ++++ dbt/adapters/bigquery/connections.py | 18 ++++----- dbt/adapters/bigquery/gcloud.py | 2 +- dbt/adapters/bigquery/impl.py | 22 +++++------ dbt/adapters/bigquery/relation.py | 4 +- .../schema_tests_test/test_schema_v2_tests.py | 2 - .../test_simple_snapshot.py | 1 - .../integration/sources_test/test_sources.py | 1 - tests/unit/test_bigquery_adapter.py | 38 +++++++++---------- 9 files changed, 48 insertions(+), 47 deletions(-) create mode 100644 .changes/unreleased/Under the Hood-20230110-110016.yaml diff --git a/.changes/unreleased/Under the Hood-20230110-110016.yaml b/.changes/unreleased/Under the Hood-20230110-110016.yaml new file mode 100644 index 000000000..3f428a801 --- /dev/null +++ b/.changes/unreleased/Under the Hood-20230110-110016.yaml @@ -0,0 +1,7 @@ +kind: Under the Hood +body: Update exception names to match dbt-core +time: 2023-01-10T11:00:16.649793-06:00 +custom: + Author: emmyoop + Issue: "441" + PR: "460" diff --git a/dbt/adapters/bigquery/connections.py b/dbt/adapters/bigquery/connections.py index 7a09eb0af..715dfb36a 100644 --- a/dbt/adapters/bigquery/connections.py +++ b/dbt/adapters/bigquery/connections.py @@ -24,9 +24,9 @@ from dbt.tracking import active_user from dbt.contracts.connection import ConnectionState, AdapterResponse from dbt.exceptions import ( - FailedToConnectException, - RuntimeException, - DatabaseException, + FailedToConnectError, + DbtRuntimeError, + DbtDatabaseError, DbtProfileError, ) from dbt.adapters.base import BaseConnectionManager, Credentials @@ -196,7 +196,7 @@ def handle_error(cls, error, message): error.query_job.location, error.query_job.project, error.query_job.job_id ) ) - raise DatabaseException(error_msg) + raise DbtDatabaseError(error_msg) def clear_transaction(self): pass @@ -223,12 +223,12 @@ def exception_handler(self, sql): "account 
you are trying to impersonate.\n\n" f"{str(e)}" ) - raise RuntimeException(message) + raise DbtRuntimeError(message) except Exception as e: logger.debug("Unhandled error while running:\n{}".format(sql)) logger.debug(e) - if isinstance(e, RuntimeException): + if isinstance(e, DbtRuntimeError): # during a sql query, an internal to dbt exception was raised. # this sounds a lot like a signal handler and probably has # useful information, so raise it without modification. @@ -238,7 +238,7 @@ def exception_handler(self, sql): # don't want to log. Hopefully they never change this! if BQ_QUERY_JOB_SPLIT in exc_message: exc_message = exc_message.split(BQ_QUERY_JOB_SPLIT)[0].strip() - raise RuntimeException(exc_message) + raise DbtRuntimeError(exc_message) def cancel_open(self) -> None: pass @@ -305,7 +305,7 @@ def get_google_credentials(cls, profile_credentials) -> GoogleCredentials: ) error = 'Invalid `method` in profile: "{}"'.format(method) - raise FailedToConnectException(error) + raise FailedToConnectError(error) @classmethod def get_impersonated_credentials(cls, profile_credentials): @@ -362,7 +362,7 @@ def open(cls, connection): connection.handle = None connection.state = "fail" - raise FailedToConnectException(str(e)) + raise FailedToConnectError(str(e)) connection.handle = handle connection.state = "open" diff --git a/dbt/adapters/bigquery/gcloud.py b/dbt/adapters/bigquery/gcloud.py index eb418e93b..c303097bc 100644 --- a/dbt/adapters/bigquery/gcloud.py +++ b/dbt/adapters/bigquery/gcloud.py @@ -25,4 +25,4 @@ def setup_default_credentials(): if gcloud_installed(): run_cmd(".", ["gcloud", "auth", "application-default", "login"]) else: - raise dbt.exceptions.RuntimeException(NOT_INSTALLED_MSG) + raise dbt.exceptions.DbtRuntimeError(NOT_INSTALLED_MSG) diff --git a/dbt/adapters/bigquery/impl.py b/dbt/adapters/bigquery/impl.py index ec749c316..920b992a0 100644 --- a/dbt/adapters/bigquery/impl.py +++ b/dbt/adapters/bigquery/impl.py @@ -55,7 +55,7 @@ def sql_escape(string): if not isinstance(string, str): - raise dbt.exceptions.CompilationException(f"cannot escape a non-string: {string}") + raise dbt.exceptions.CompilationError(f"cannot escape a non-string: {string}") return json.dumps(string)[1:-1] @@ -98,9 +98,9 @@ def parse(cls, raw_partition_by) -> Optional["PartitionConfig"]: cls.validate(raw_partition_by) return cls.from_dict(raw_partition_by) except ValidationError as exc: - raise dbt.exceptions.ValidationException("Could not parse partition config") from exc + raise dbt.exceptions.DbtValidationError("Could not parse partition config") from exc except TypeError: - raise dbt.exceptions.CompilationException( + raise dbt.exceptions.CompilationError( f"Invalid partition_by config:\n" f" Got: {raw_partition_by}\n" f' Expected a dictionary with "field" and "data_type" keys' @@ -173,9 +173,7 @@ def drop_relation(self, relation: BigQueryRelation) -> None: conn.handle.delete_table(table_ref) def truncate_relation(self, relation: BigQueryRelation) -> None: - raise dbt.exceptions.NotImplementedException( - "`truncate` is not implemented for this adapter!" 
- ) + raise dbt.exceptions.NotImplementedError("`truncate` is not implemented for this adapter!") def rename_relation( self, from_relation: BigQueryRelation, to_relation: BigQueryRelation @@ -191,7 +189,7 @@ def rename_relation( or from_relation.type == RelationType.View or to_relation.type == RelationType.View ): - raise dbt.exceptions.RuntimeException( + raise dbt.exceptions.DbtRuntimeError( "Renaming of views is not currently supported in BigQuery" ) @@ -440,7 +438,7 @@ def copy_table(self, source, destination, materialization): elif materialization == "table": write_disposition = WRITE_TRUNCATE else: - dbt.exceptions.raise_compiler_error( + raise dbt.exceptions.CompilationError( 'Copy table materialization must be "copy" or "table", but ' f"config.get('copy_materialization', 'table') was " f"{materialization}" @@ -473,11 +471,11 @@ def poll_until_job_completes(cls, job, timeout): job.reload() if job.state != "DONE": - raise dbt.exceptions.RuntimeException("BigQuery Timeout Exceeded") + raise dbt.exceptions.DbtRuntimeError("BigQuery Timeout Exceeded") elif job.error_result: message = "\n".join(error["message"].strip() for error in job.errors) - raise dbt.exceptions.RuntimeException(message) + raise dbt.exceptions.DbtRuntimeError(message) def _bq_table_to_relation(self, bq_table): if bq_table is None: @@ -502,7 +500,7 @@ def add_query(self, sql, auto_begin=True, bindings=None, abridge_sql_log=False): if self.nice_connection_name() in ["on-run-start", "on-run-end"]: self.warning_on_hooks(self.nice_connection_name()) else: - raise dbt.exceptions.NotImplementedException( + raise dbt.exceptions.NotImplementedError( "`add_query` is not implemented for this adapter!" ) @@ -860,7 +858,7 @@ def string_add_sql( elif location == "prepend": return f"concat('{value}', {add_to})" else: - raise dbt.exceptions.RuntimeException( + raise dbt.exceptions.DbtRuntimeError( f'Got an unexpected location value of "{location}"' ) diff --git a/dbt/adapters/bigquery/relation.py b/dbt/adapters/bigquery/relation.py index 7224de8cf..d49677168 100644 --- a/dbt/adapters/bigquery/relation.py +++ b/dbt/adapters/bigquery/relation.py @@ -4,7 +4,7 @@ from itertools import chain, islice from dbt.adapters.base.relation import BaseRelation, ComponentName, InformationSchema -from dbt.exceptions import raise_compiler_error +from dbt.exceptions import CompilationError from dbt.utils import filter_null_values from typing import TypeVar @@ -94,7 +94,7 @@ def from_relation(cls, relation, information_schema_view): msg = ( f'No location/region found when trying to retrieve "{information_schema_view}"' ) - raise raise_compiler_error(msg) + raise CompilationError(msg) info_schema = info_schema.incorporate(location=relation.location) return info_schema diff --git a/tests/integration/schema_tests_test/test_schema_v2_tests.py b/tests/integration/schema_tests_test/test_schema_v2_tests.py index 0709fdb44..7b213db8a 100644 --- a/tests/integration/schema_tests_test/test_schema_v2_tests.py +++ b/tests/integration/schema_tests_test/test_schema_v2_tests.py @@ -2,8 +2,6 @@ import os from dbt.task.test import TestTask -from dbt.exceptions import CompilationException -from dbt.contracts.results import TestStatus class TestBQSchemaTests(DBTIntegrationTest): @property diff --git a/tests/integration/simple_snapshot_test/test_simple_snapshot.py b/tests/integration/simple_snapshot_test/test_simple_snapshot.py index 7f6c134c3..191dde72c 100644 --- a/tests/integration/simple_snapshot_test/test_simple_snapshot.py +++ 
b/tests/integration/simple_snapshot_test/test_simple_snapshot.py @@ -1,7 +1,6 @@ from tests.integration.base import DBTIntegrationTest, use_profile from datetime import datetime import pytz -import dbt.exceptions class BaseSimpleSnapshotTest(DBTIntegrationTest): diff --git a/tests/integration/sources_test/test_sources.py b/tests/integration/sources_test/test_sources.py index 21dbb19bf..83a586434 100644 --- a/tests/integration/sources_test/test_sources.py +++ b/tests/integration/sources_test/test_sources.py @@ -4,7 +4,6 @@ import yaml -from dbt.exceptions import CompilationException import dbt.tracking import dbt.version from dbt.events.functions import reset_metadata_vars diff --git a/tests/unit/test_bigquery_adapter.py b/tests/unit/test_bigquery_adapter.py index cf8bcdfd8..29c7a9e3d 100644 --- a/tests/unit/test_bigquery_adapter.py +++ b/tests/unit/test_bigquery_adapter.py @@ -156,8 +156,8 @@ def test_acquire_connection_oauth_no_project_validations(self, mock_open_connect connection = adapter.acquire_connection('dummy') self.assertEqual(connection.type, 'bigquery') - except dbt.exceptions.ValidationException as e: - self.fail('got ValidationException: {}'.format(str(e))) + except dbt.exceptions.DbtValidationError as e: + self.fail('got DbtValidationError: {}'.format(str(e))) except BaseException as e: raise @@ -173,8 +173,8 @@ def test_acquire_connection_oauth_validations(self, mock_open_connection): connection = adapter.acquire_connection('dummy') self.assertEqual(connection.type, 'bigquery') - except dbt.exceptions.ValidationException as e: - self.fail('got ValidationException: {}'.format(str(e))) + except dbt.exceptions.DbtValidationError as e: + self.fail('got DbtValidationError: {}'.format(str(e))) except BaseException as e: raise @@ -190,8 +190,8 @@ def test_acquire_connection_service_account_validations(self, mock_open_connecti connection = adapter.acquire_connection('dummy') self.assertEqual(connection.type, 'bigquery') - except dbt.exceptions.ValidationException as e: - self.fail('got ValidationException: {}'.format(str(e))) + except dbt.exceptions.DbtValidationError as e: + self.fail('got DbtValidationError: {}'.format(str(e))) except BaseException as e: raise @@ -207,8 +207,8 @@ def test_acquire_connection_oauth_token_validations(self, mock_open_connection): connection = adapter.acquire_connection('dummy') self.assertEqual(connection.type, 'bigquery') - except dbt.exceptions.ValidationException as e: - self.fail('got ValidationException: {}'.format(str(e))) + except dbt.exceptions.DbtValidationError as e: + self.fail('got DbtValidationError: {}'.format(str(e))) except BaseException as e: raise @@ -224,8 +224,8 @@ def test_acquire_connection_oauth_credentials_validations(self, mock_open_connec connection = adapter.acquire_connection('dummy') self.assertEqual(connection.type, 'bigquery') - except dbt.exceptions.ValidationException as e: - self.fail('got ValidationException: {}'.format(str(e))) + except dbt.exceptions.DbtValidationError as e: + self.fail('got DbtValidationError: {}'.format(str(e))) except BaseException as e: raise @@ -241,8 +241,8 @@ def test_acquire_connection_impersonated_service_account_validations(self, mock_ connection = adapter.acquire_connection('dummy') self.assertEqual(connection.type, 'bigquery') - except dbt.exceptions.ValidationException as e: - self.fail('got ValidationException: {}'.format(str(e))) + except dbt.exceptions.DbtValidationError as e: + self.fail('got DbtValidationError: {}'.format(str(e))) except BaseException as e: raise @@ -259,8 +259,8 
@@ def test_acquire_connection_priority(self, mock_open_connection): self.assertEqual(connection.type, 'bigquery') self.assertEqual(connection.credentials.priority, 'batch') - except dbt.exceptions.ValidationException as e: - self.fail('got ValidationException: {}'.format(str(e))) + except dbt.exceptions.DbtValidationError as e: + self.fail('got DbtValidationError: {}'.format(str(e))) mock_open_connection.assert_not_called() connection.handle @@ -274,8 +274,8 @@ def test_acquire_connection_maximum_bytes_billed(self, mock_open_connection): self.assertEqual(connection.type, 'bigquery') self.assertEqual(connection.credentials.maximum_bytes_billed, 0) - except dbt.exceptions.ValidationException as e: - self.fail('got ValidationException: {}'.format(str(e))) + except dbt.exceptions.DbtValidationError as e: + self.fail('got DbtValidationError: {}'.format(str(e))) mock_open_connection.assert_not_called() connection.handle @@ -674,10 +674,10 @@ def test_copy_table_materialization_incremental(self): def test_parse_partition_by(self): adapter = self.get_adapter('oauth') - with self.assertRaises(dbt.exceptions.ValidationException): + with self.assertRaises(dbt.exceptions.DbtValidationError): adapter.parse_partition_by("date(ts)") - with self.assertRaises(dbt.exceptions.ValidationException): + with self.assertRaises(dbt.exceptions.DbtValidationError): adapter.parse_partition_by("ts") self.assertEqual( @@ -841,7 +841,7 @@ def test_parse_partition_by(self): ) # Invalid, should raise an error - with self.assertRaises(dbt.exceptions.ValidationException): + with self.assertRaises(dbt.exceptions.DbtValidationError): adapter.parse_partition_by({}) # passthrough From 02132388891d2ab413b6ed10b4f4e1d5861a9c4e Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Thu, 12 Jan 2023 11:04:15 -0800 Subject: [PATCH 05/16] Bumping version to 1.5.0a1 and generate changelog (#465) * Bumping version to 1.5.0a1 and generate CHANGELOG * manual changie updates Co-authored-by: Github Build Bot Co-authored-by: Colin --- .bumpversion.cfg | 2 +- .changes/0.0.0.md | 1 + .changes/1.4.0-b1.md | 18 ------------------ .../1.4.0/Dependencies-20221010-181501.yaml | 7 ------- .changes/1.4.0/Features-20220807-164227.yaml | 7 ------- .changes/1.4.0/Features-20220926-105700.yaml | 7 ------- .changes/1.4.0/Features-20221020-223914.yaml | 8 -------- .changes/1.4.0/Fixes-20221019-115618.yaml | 7 ------- .changes/1.4.0/Fixes-20221026-192327.yaml | 7 ------- .../1.4.0/Under the Hood-20221207-103505.yaml | 7 ------- .../1.4.0/Under the Hood-20221207-151813.yaml | 8 -------- .../Dependencies-20221213-135331.yaml | 7 ------- .../unreleased/Features-20220823-085601.yaml | 7 ------- .../unreleased/Features-20221119-115200.yaml | 7 ------- .../unreleased/Features-20230109-105921.yaml | 7 ------- .../unreleased/Fixes-20230109-105325.yaml | 7 ------- .../Under the Hood-20220805-165120.yaml | 7 ------- .../Under the Hood-20221219-212421.yaml | 7 ------- .../Under the Hood-20221221-124711.yaml | 8 -------- .../Under the Hood-20230110-110016.yaml | 7 ------- CHANGELOG.md | 19 +------------------ dbt/adapters/bigquery/__version__.py | 2 +- setup.py | 2 +- 23 files changed, 5 insertions(+), 161 deletions(-) delete mode 100644 .changes/1.4.0-b1.md delete mode 100644 .changes/1.4.0/Dependencies-20221010-181501.yaml delete mode 100644 .changes/1.4.0/Features-20220807-164227.yaml delete mode 100644 .changes/1.4.0/Features-20220926-105700.yaml delete mode 100644 
.changes/1.4.0/Features-20221020-223914.yaml delete mode 100644 .changes/1.4.0/Fixes-20221019-115618.yaml delete mode 100644 .changes/1.4.0/Fixes-20221026-192327.yaml delete mode 100644 .changes/1.4.0/Under the Hood-20221207-103505.yaml delete mode 100644 .changes/1.4.0/Under the Hood-20221207-151813.yaml delete mode 100644 .changes/unreleased/Dependencies-20221213-135331.yaml delete mode 100644 .changes/unreleased/Features-20220823-085601.yaml delete mode 100644 .changes/unreleased/Features-20221119-115200.yaml delete mode 100644 .changes/unreleased/Features-20230109-105921.yaml delete mode 100644 .changes/unreleased/Fixes-20230109-105325.yaml delete mode 100644 .changes/unreleased/Under the Hood-20220805-165120.yaml delete mode 100644 .changes/unreleased/Under the Hood-20221219-212421.yaml delete mode 100644 .changes/unreleased/Under the Hood-20221221-124711.yaml delete mode 100644 .changes/unreleased/Under the Hood-20230110-110016.yaml diff --git a/.bumpversion.cfg b/.bumpversion.cfg index 487f15b2a..ba1f95c9a 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 1.4.0b1 +current_version = 1.5.0a1 parse = (?P\d+) \.(?P\d+) \.(?P\d+) diff --git a/.changes/0.0.0.md b/.changes/0.0.0.md index f27e1ab71..b719cfe93 100644 --- a/.changes/0.0.0.md +++ b/.changes/0.0.0.md @@ -1,5 +1,6 @@ ## Previous Releases For information on prior major and minor releases, see their changelogs: +* [1.4](https://github.com/dbt-labs/dbt-bigquery/blob/1.4.latest/CHANGELOG.md) - [1.3](https://github.com/dbt-labs/dbt-bigquery/blob/1.3.latest/CHANGELOG.md) - [1.2](https://github.com/dbt-labs/dbt-bigquery/blob/1.2.latest/CHANGELOG.md) - [1.1](https://github.com/dbt-labs/dbt-bigquery/blob/1.1.latest/CHANGELOG.md) diff --git a/.changes/1.4.0-b1.md b/.changes/1.4.0-b1.md deleted file mode 100644 index 22f898ceb..000000000 --- a/.changes/1.4.0-b1.md +++ /dev/null @@ -1,18 +0,0 @@ -## dbt-bigquery 1.4.0-b1 - December 15, 2022 -### Features -- Support for ingestion time partition table on BigQuery as incremental materialization ([#75](https://github.com/dbt-labs/dbt-bigquery/issues/75), [#136](https://github.com/dbt-labs/dbt-bigquery/pull/136)) -- Migrate dbt-utils current_timestamp macros into core + adapters ([#324](https://github.com/dbt-labs/dbt-bigquery/issues/324), [#323](https://github.com/dbt-labs/dbt-bigquery/pull/323)) -- Optimize insert_overwrite incremental strategy with WRITE_TRUNCATE / Partition copy ([#77](https://github.com/dbt-labs/dbt-bigquery/issues/77), [#167](https://github.com/dbt-labs/dbt-bigquery/pull/167)) -### Fixes -- use execution_project in python models ([#355](https://github.com/dbt-labs/dbt-bigquery/issues/355), [#356](https://github.com/dbt-labs/dbt-bigquery/pull/356)) -- fix args for console link logging ([#362](https://github.com/dbt-labs/dbt-bigquery/issues/362), [#363](https://github.com/dbt-labs/dbt-bigquery/pull/363)) -### Under the Hood -- event logging changes to be in parity with dbt-core post pr#6291 ([#6139](https://github.com/dbt-labs/dbt-bigquery/issues/6139), [#410](https://github.com/dbt-labs/dbt-bigquery/pull/410)) -- fix issue with tox 4.0.0 release which changes passenv syntax for space-separated variables ([#411](https://github.com/dbt-labs/dbt-bigquery/issues/411), [#411](https://github.com/dbt-labs/dbt-bigquery/pull/411)) -### Dependencies -- Bump 
google-cloud-bigquery dependency to < 3.3.3 ([#269](https://github.com/dbt-labs/dbt-bigquery/issues/269), [#332](https://github.com/dbt-labs/dbt-bigquery/pull/332)) - -### Contributors -- [@Kayrnt](https://github.com/Kayrnt) ([#136](https://github.com/dbt-labs/dbt-bigquery/pull/136), [#167](https://github.com/dbt-labs/dbt-bigquery/pull/167), [#363](https://github.com/dbt-labs/dbt-bigquery/pull/363)) -- [@eplus-aolalere](https://github.com/eplus-aolalere) ([#332](https://github.com/dbt-labs/dbt-bigquery/pull/332)) -- [@ericapetersson](https://github.com/ericapetersson) ([#356](https://github.com/dbt-labs/dbt-bigquery/pull/356)) diff --git a/.changes/1.4.0/Dependencies-20221010-181501.yaml b/.changes/1.4.0/Dependencies-20221010-181501.yaml deleted file mode 100644 index 3fff057ad..000000000 --- a/.changes/1.4.0/Dependencies-20221010-181501.yaml +++ /dev/null @@ -1,7 +0,0 @@ -kind: Dependencies -body: Bump google-cloud-bigquery dependency to < 3.3.3 -time: 2022-10-10T18:15:01.113219+01:00 -custom: - Author: eplus-aolalere - Issue: "269" - PR: "332" diff --git a/.changes/1.4.0/Features-20220807-164227.yaml b/.changes/1.4.0/Features-20220807-164227.yaml deleted file mode 100644 index 9352edc27..000000000 --- a/.changes/1.4.0/Features-20220807-164227.yaml +++ /dev/null @@ -1,7 +0,0 @@ -kind: Features -body: Support for ingestion time partition table on BigQuery as incremental materialization -time: 2022-08-07T16:42:27.232818+02:00 -custom: - Author: Kayrnt - Issue: "75" - PR: "136" diff --git a/.changes/1.4.0/Features-20220926-105700.yaml b/.changes/1.4.0/Features-20220926-105700.yaml deleted file mode 100644 index 61e0ac741..000000000 --- a/.changes/1.4.0/Features-20220926-105700.yaml +++ /dev/null @@ -1,7 +0,0 @@ -kind: Features -body: Migrate dbt-utils current_timestamp macros into core + adapters -time: 2022-09-26T10:57:00.942765-07:00 -custom: - Author: colin-rogers-dbt - Issue: "324" - PR: "323" diff --git a/.changes/1.4.0/Features-20221020-223914.yaml b/.changes/1.4.0/Features-20221020-223914.yaml deleted file mode 100644 index 58b8e60a3..000000000 --- a/.changes/1.4.0/Features-20221020-223914.yaml +++ /dev/null @@ -1,8 +0,0 @@ -kind: Features -body: Optimize insert_overwrite incremental strategy with WRITE_TRUNCATE / Partition - copy -time: 2022-10-20T22:39:14.091878+02:00 -custom: - Author: Kayrnt - Issue: "77" - PR: "167" diff --git a/.changes/1.4.0/Fixes-20221019-115618.yaml b/.changes/1.4.0/Fixes-20221019-115618.yaml deleted file mode 100644 index 4aef9955a..000000000 --- a/.changes/1.4.0/Fixes-20221019-115618.yaml +++ /dev/null @@ -1,7 +0,0 @@ -kind: Fixes -body: use execution_project in python models -time: 2022-10-19T11:56:18.842401+02:00 -custom: - Author: ericapetersson - Issue: "355" - PR: "356" diff --git a/.changes/1.4.0/Fixes-20221026-192327.yaml b/.changes/1.4.0/Fixes-20221026-192327.yaml deleted file mode 100644 index 515095c98..000000000 --- a/.changes/1.4.0/Fixes-20221026-192327.yaml +++ /dev/null @@ -1,7 +0,0 @@ -kind: Fixes -body: fix args for console link logging -time: 2022-10-26T19:23:27.916326+02:00 -custom: - Author: Kayrnt - Issue: "362" - PR: "363" diff --git a/.changes/1.4.0/Under the Hood-20221207-103505.yaml b/.changes/1.4.0/Under the Hood-20221207-103505.yaml deleted file mode 100644 index ea1e3a62f..000000000 --- a/.changes/1.4.0/Under the Hood-20221207-103505.yaml +++ /dev/null @@ -1,7 +0,0 @@ -kind: Under the Hood -body: event logging changes to be in parity with 
dbt-core post pr#6291 -time: 2022-12-07T10:35:05.567885-06:00 -custom: - Author: McKnight-42 - Issue: "6139" - PR: "410" diff --git a/.changes/1.4.0/Under the Hood-20221207-151813.yaml b/.changes/1.4.0/Under the Hood-20221207-151813.yaml deleted file mode 100644 index 32084a6a7..000000000 --- a/.changes/1.4.0/Under the Hood-20221207-151813.yaml +++ /dev/null @@ -1,8 +0,0 @@ -kind: Under the Hood -body: fix issue with tox 4.0.0 release which changes passenv syntax for space-separated - variables -time: 2022-12-07T15:18:13.996118-06:00 -custom: - Author: McKnight-42 - Issue: "411" - PR: "411" diff --git a/.changes/unreleased/Dependencies-20221213-135331.yaml b/.changes/unreleased/Dependencies-20221213-135331.yaml deleted file mode 100644 index b26c5da1a..000000000 --- a/.changes/unreleased/Dependencies-20221213-135331.yaml +++ /dev/null @@ -1,7 +0,0 @@ -kind: Dependencies -body: Add supoort for python 3.11 -time: 2022-12-13T13:53:31.182619-06:00 -custom: - Author: McKnight-42 - Issue: "407" - PR: "431" diff --git a/.changes/unreleased/Features-20220823-085601.yaml b/.changes/unreleased/Features-20220823-085601.yaml deleted file mode 100644 index d8bcd51bc..000000000 --- a/.changes/unreleased/Features-20220823-085601.yaml +++ /dev/null @@ -1,7 +0,0 @@ -kind: Features -body: incremental predicates -time: 2022-08-23T08:56:01.043831-05:00 -custom: - Author: dave-connors-3 - Issue: "283" - PR: "284" diff --git a/.changes/unreleased/Features-20221119-115200.yaml b/.changes/unreleased/Features-20221119-115200.yaml deleted file mode 100644 index 04189c19c..000000000 --- a/.changes/unreleased/Features-20221119-115200.yaml +++ /dev/null @@ -1,7 +0,0 @@ -kind: Features -body: Support BigQuery 'labels' and 'hours_to_expiration' configs for seeds. -time: 2022-11-19T11:52:00.80272+11:00 -custom: - Author: darrylng - Issue: "125" - PR: "133" diff --git a/.changes/unreleased/Features-20230109-105921.yaml b/.changes/unreleased/Features-20230109-105921.yaml deleted file mode 100644 index 0eaec0dcb..000000000 --- a/.changes/unreleased/Features-20230109-105921.yaml +++ /dev/null @@ -1,7 +0,0 @@ -kind: Features -body: Fetch table description to `catalog.json`, so it can be shown in dbt docs -time: 2023-01-09T10:59:21.213259-06:00 -custom: - Author: McKnight-42, halvorlu - Issue: "173" - PR: "174" diff --git a/.changes/unreleased/Fixes-20230109-105325.yaml b/.changes/unreleased/Fixes-20230109-105325.yaml deleted file mode 100644 index 2a16883fc..000000000 --- a/.changes/unreleased/Fixes-20230109-105325.yaml +++ /dev/null @@ -1,7 +0,0 @@ -kind: Fixes -body: stop eliminating trailing whitespace in incremental merge sql -time: 2023-01-09T10:53:25.837837-08:00 -custom: - Author: colin-rogers-dbt - Issue: "457" - PR: "458" diff --git a/.changes/unreleased/Under the Hood-20220805-165120.yaml b/.changes/unreleased/Under the Hood-20220805-165120.yaml deleted file mode 100644 index dacf18dfe..000000000 --- a/.changes/unreleased/Under the Hood-20220805-165120.yaml +++ /dev/null @@ -1,7 +0,0 @@ -kind: Under the Hood -body: use gcp retry logic for new connections -time: 2022-08-05T16:51:20.590446-04:00 -custom: - Author: nathaniel-may - Issue: "229" - PR: "230" diff --git a/.changes/unreleased/Under the Hood-20221219-212421.yaml b/.changes/unreleased/Under the Hood-20221219-212421.yaml deleted file mode 100644 index c381a18c5..000000000 --- a/.changes/unreleased/Under the Hood-20221219-212421.yaml +++ /dev/null @@ -1,7 +0,0 @@ -kind: Under the Hood -body: Truncates Job Labels to 63 Characters -time: 
2022-12-19T21:24:21.304277Z -custom: - Author: ernestoongaro - Issue: "202" - PR: "442" diff --git a/.changes/unreleased/Under the Hood-20221221-124711.yaml b/.changes/unreleased/Under the Hood-20221221-124711.yaml deleted file mode 100644 index fcb83f9eb..000000000 --- a/.changes/unreleased/Under the Hood-20221221-124711.yaml +++ /dev/null @@ -1,8 +0,0 @@ -kind: Under the Hood -body: Partition By unit test was failing, referenced the incorrect exception to be - raised -time: 2022-12-21T12:47:11.619388-05:00 -custom: - Author: mikealfare - Issue: "443" - PR: "445" diff --git a/.changes/unreleased/Under the Hood-20230110-110016.yaml b/.changes/unreleased/Under the Hood-20230110-110016.yaml deleted file mode 100644 index 3f428a801..000000000 --- a/.changes/unreleased/Under the Hood-20230110-110016.yaml +++ /dev/null @@ -1,7 +0,0 @@ -kind: Under the Hood -body: Update exception names to match dbt-core -time: 2023-01-10T11:00:16.649793-06:00 -custom: - Author: emmyoop - Issue: "441" - PR: "460" diff --git a/CHANGELOG.md b/CHANGELOG.md index f1e182733..56e5e7226 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,26 +4,9 @@ - Changes are listed under the (pre)release in which they first appear. Subsequent releases include changes from previous releases. - "Breaking changes" listed under a version may require action from end users or external maintainers when upgrading to that version. - Do not edit this file directly. This file is auto-generated using [changie](https://github.com/miniscruff/changie). For details on how to document a change, see [the contributing guide](https://github.com/dbt-labs/dbt-bigquery/blob/main/CONTRIBUTING.md#adding-changelog-entry) -## dbt-bigquery 1.4.0-b1 - December 15, 2022 -### Features -- Support for ingestion time partition table on BigQuery as incremental materialization ([#75](https://github.com/dbt-labs/dbt-bigquery/issues/75), [#136](https://github.com/dbt-labs/dbt-bigquery/pull/136)) -- Migrate dbt-utils current_timestamp macros into core + adapters ([#324](https://github.com/dbt-labs/dbt-bigquery/issues/324), [#323](https://github.com/dbt-labs/dbt-bigquery/pull/323)) -- Optimize insert_overwrite incremental strategy with WRITE_TRUNCATE / Partition copy ([#77](https://github.com/dbt-labs/dbt-bigquery/issues/77), [#167](https://github.com/dbt-labs/dbt-bigquery/pull/167)) -### Fixes -- use execution_project in python models ([#355](https://github.com/dbt-labs/dbt-bigquery/issues/355), [#356](https://github.com/dbt-labs/dbt-bigquery/pull/356)) -- fix args for console link logging ([#362](https://github.com/dbt-labs/dbt-bigquery/issues/362), [#363](https://github.com/dbt-labs/dbt-bigquery/pull/363)) -### Under the Hood -- event logging changes to be in parity with dbt-core post pr#6291 ([#6139](https://github.com/dbt-labs/dbt-bigquery/issues/6139), [#410](https://github.com/dbt-labs/dbt-bigquery/pull/410)) -- fix issue with tox 4.0.0 release which changes passenv syntax for space-separated variables ([#411](https://github.com/dbt-labs/dbt-bigquery/issues/411), [#411](https://github.com/dbt-labs/dbt-bigquery/pull/411)) -### Dependencies -- Bump google-cloud-bigquery dependency to < 3.3.3 ([#269](https://github.com/dbt-labs/dbt-bigquery/issues/269), [#332](https://github.com/dbt-labs/dbt-bigquery/pull/332)) - -### Contributors -- [@Kayrnt](https://github.com/Kayrnt) 
([#136](https://github.com/dbt-labs/dbt-bigquery/pull/136), [#167](https://github.com/dbt-labs/dbt-bigquery/pull/167), [#363](https://github.com/dbt-labs/dbt-bigquery/pull/363)) -- [@eplus-aolalere](https://github.com/eplus-aolalere) ([#332](https://github.com/dbt-labs/dbt-bigquery/pull/332)) -- [@ericapetersson](https://github.com/ericapetersson) ([#356](https://github.com/dbt-labs/dbt-bigquery/pull/356)) ## Previous Releases For information on prior major and minor releases, see their changelogs: +* [1.4](https://github.com/dbt-labs/dbt-bigquery/blob/1.4.latest/CHANGELOG.md) - [1.3](https://github.com/dbt-labs/dbt-bigquery/blob/1.3.latest/CHANGELOG.md) - [1.2](https://github.com/dbt-labs/dbt-bigquery/blob/1.2.latest/CHANGELOG.md) - [1.1](https://github.com/dbt-labs/dbt-bigquery/blob/1.1.latest/CHANGELOG.md) diff --git a/dbt/adapters/bigquery/__version__.py b/dbt/adapters/bigquery/__version__.py index 27cfeecd9..219c289b1 100644 --- a/dbt/adapters/bigquery/__version__.py +++ b/dbt/adapters/bigquery/__version__.py @@ -1 +1 @@ -version = "1.4.0b1" +version = "1.5.0a1" diff --git a/setup.py b/setup.py index 52defb47d..a7b278203 100644 --- a/setup.py +++ b/setup.py @@ -57,7 +57,7 @@ def _dbt_core_version(plugin_version: str) -> str: package_name = "dbt-bigquery" -package_version = "1.4.0b1" +package_version = "1.5.0a1" dbt_core_version = _dbt_core_version(_dbt_bigquery_version()) description = """The BigQuery adapter plugin for dbt""" From b24e09b6036bc0137a1f6f1e27845733fd758437 Mon Sep 17 00:00:00 2001 From: Emily Rockman Date: Thu, 19 Jan 2023 00:10:26 -0600 Subject: [PATCH 06/16] add 1.3 and 1.4 to testing matrix (#467) --- .github/workflows/release-branch-tests.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/release-branch-tests.yml b/.github/workflows/release-branch-tests.yml index 4e29dd0f9..282efdf2c 100644 --- a/.github/workflows/release-branch-tests.yml +++ b/.github/workflows/release-branch-tests.yml @@ -39,7 +39,7 @@ jobs: max-parallel: 1 fail-fast: false matrix: - branch: [1.0.latest, 1.1.latest, 1.2.latest, main] + branch: [1.0.latest, 1.1.latest, 1.2.latest, 1.3.latest, 1.4.latest, main] workflow_name: [main.yml, integration.yml] steps: From 1d6b428fa44fc9528d9c31736e991e72098d20ad Mon Sep 17 00:00:00 2001 From: Jeremy Cohen Date: Thu, 19 Jan 2023 13:48:01 +0100 Subject: [PATCH 07/16] Convert incremental on_schema_change tests (#469) * Convert incremental on_schema_change tests * Switch to dbt-core main --- .../test_incremental_on_schema_change.py | 154 +++++++++++++++++ .../test_incremental_predicates.py | 0 .../test_incremental_unique_id.py | 0 .../models/incremental_append_new_columns.sql | 29 ---- ...remental_append_new_columns_remove_one.sql | 28 --- ...l_append_new_columns_remove_one_target.sql | 19 -- .../incremental_append_new_columns_target.sql | 19 -- .../models/incremental_fail.sql | 19 -- .../models/incremental_ignore.sql | 19 -- .../models/incremental_ignore_target.sql | 15 -- .../models/incremental_sync_all_columns.sql | 31 ---- ...c_all_columns_dynamic_insert_overwrite.sql | 40 ----- .../incremental_sync_all_columns_target.sql | 20 --- ...ncremental_time_ingestion_partitioning.sql | 38 ---- ...tal_time_ingestion_partitioning_target.sql | 24 --- .../models/model_a.sql | 22 --- .../models/schema.yml | 80 --------- .../test_incremental_schema.py | 162 ------------------ .../tests/select_from_a.sql | 1 - 
...ct_from_incremental_append_new_columns.sql | 1 - ...remental_append_new_columns_remove_one.sql | 1 - ...l_append_new_columns_remove_one_target.sql | 1 - ..._incremental_append_new_columns_target.sql | 1 - .../tests/select_from_incremental_ignore.sql | 1 - .../select_from_incremental_ignore_target.sql | 1 - ...lect_from_incremental_sync_all_columns.sql | 1 - ...om_incremental_sync_all_columns_target.sql | 1 - ...ncremental_time_ingestion_partitioning.sql | 1 - ...tal_time_ingestion_partitioning_target.sql | 1 - 29 files changed, 154 insertions(+), 576 deletions(-) create mode 100644 tests/functional/adapter/incremental/test_incremental_on_schema_change.py rename tests/functional/adapter/{ => incremental}/test_incremental_predicates.py (100%) rename tests/functional/adapter/{ => incremental}/test_incremental_unique_id.py (100%) delete mode 100644 tests/integration/incremental_schema_tests/models/incremental_append_new_columns.sql delete mode 100644 tests/integration/incremental_schema_tests/models/incremental_append_new_columns_remove_one.sql delete mode 100644 tests/integration/incremental_schema_tests/models/incremental_append_new_columns_remove_one_target.sql delete mode 100644 tests/integration/incremental_schema_tests/models/incremental_append_new_columns_target.sql delete mode 100644 tests/integration/incremental_schema_tests/models/incremental_fail.sql delete mode 100644 tests/integration/incremental_schema_tests/models/incremental_ignore.sql delete mode 100644 tests/integration/incremental_schema_tests/models/incremental_ignore_target.sql delete mode 100644 tests/integration/incremental_schema_tests/models/incremental_sync_all_columns.sql delete mode 100644 tests/integration/incremental_schema_tests/models/incremental_sync_all_columns_dynamic_insert_overwrite.sql delete mode 100644 tests/integration/incremental_schema_tests/models/incremental_sync_all_columns_target.sql delete mode 100644 tests/integration/incremental_schema_tests/models/incremental_time_ingestion_partitioning.sql delete mode 100644 tests/integration/incremental_schema_tests/models/incremental_time_ingestion_partitioning_target.sql delete mode 100644 tests/integration/incremental_schema_tests/models/model_a.sql delete mode 100644 tests/integration/incremental_schema_tests/models/schema.yml delete mode 100644 tests/integration/incremental_schema_tests/test_incremental_schema.py delete mode 100644 tests/integration/incremental_schema_tests/tests/select_from_a.sql delete mode 100644 tests/integration/incremental_schema_tests/tests/select_from_incremental_append_new_columns.sql delete mode 100644 tests/integration/incremental_schema_tests/tests/select_from_incremental_append_new_columns_remove_one.sql delete mode 100644 tests/integration/incremental_schema_tests/tests/select_from_incremental_append_new_columns_remove_one_target.sql delete mode 100644 tests/integration/incremental_schema_tests/tests/select_from_incremental_append_new_columns_target.sql delete mode 100644 tests/integration/incremental_schema_tests/tests/select_from_incremental_ignore.sql delete mode 100644 tests/integration/incremental_schema_tests/tests/select_from_incremental_ignore_target.sql delete mode 100644 tests/integration/incremental_schema_tests/tests/select_from_incremental_sync_all_columns.sql delete mode 100644 tests/integration/incremental_schema_tests/tests/select_from_incremental_sync_all_columns_target.sql delete mode 100644 tests/integration/incremental_schema_tests/tests/select_from_incremental_time_ingestion_partitioning.sql delete 
mode 100644 tests/integration/incremental_schema_tests/tests/select_from_incremental_time_ingestion_partitioning_target.sql diff --git a/tests/functional/adapter/incremental/test_incremental_on_schema_change.py b/tests/functional/adapter/incremental/test_incremental_on_schema_change.py new file mode 100644 index 000000000..882b378a2 --- /dev/null +++ b/tests/functional/adapter/incremental/test_incremental_on_schema_change.py @@ -0,0 +1,154 @@ +import pytest + +from dbt.tests.adapter.incremental.test_incremental_on_schema_change import ( + BaseIncrementalOnSchemaChangeSetup, + BaseIncrementalOnSchemaChange, +) + +from dbt.tests.adapter.incremental.fixtures import ( + _MODELS__A, + _MODELS__INCREMENTAL_SYNC_ALL_COLUMNS_TARGET, +) + + +class TestIncrementalOnSchemaChange(BaseIncrementalOnSchemaChange): + pass + + +_MODELS__INCREMENTAL_SYNC_ALL_COLUMNS_DYNAMIC_INSERT_OVERWRITE = """ +{{ + config( + materialized='incremental', + unique_key='id', + on_schema_change='sync_all_columns', + partition_by={ + "field": "id", + "data_type": "int64", + "range": { + "start": 1, + "end": 6, + "interval": 1 + } + }, + incremental_strategy='insert_overwrite' + ) +}} + +WITH source_data AS (SELECT * FROM {{ ref('model_a') }} ) + +{% set string_type = 'string' %} + +{% if is_incremental() %} + +SELECT id, + cast(field1 as {{string_type}}) as field1, + cast(field3 as {{string_type}}) as field3, -- to validate new fields + cast(field4 as {{string_type}}) AS field4 -- to validate new fields + +FROM source_data WHERE id > _dbt_max_partition + +{% else %} + +select id, + cast(field1 as {{string_type}}) as field1, + cast(field2 as {{string_type}}) as field2 + +from source_data where id <= 3 + +{% endif %} +""" + +_MODELS__INCREMENTAL_TIME_INGESTION_PARTITIONING = """ + +{{ + config( + materialized="incremental", + incremental_strategy='insert_overwrite', + partition_by={ + "field": "date_hour", + "data_type": "datetime", + "granularity": "hour", + "time_ingestion_partitioning": true + } + ) +}} + + +with data as ( + + {% if not is_incremental() %} + + select 1 as id, cast('2020-01-01 01:00:00' as datetime) as date_hour union all + select 2 as id, cast('2020-01-01 01:00:00' as datetime) as date_hour union all + select 3 as id, cast('2020-01-01 01:00:00' as datetime) as date_hour union all + select 4 as id, cast('2020-01-01 01:00:00' as datetime) as date_hour + + {% else %} + + -- we want to overwrite the 4 records in the 2020-01-01 01:00:00 partition + -- with the 2 records below, but add two more in the 2020-01-00 02:00:00 partition + select 10 as id, cast('2020-01-01 01:00:00' as datetime) as date_hour union all + select 20 as id, cast('2020-01-01 01:00:00' as datetime) as date_hour union all + select 30 as id, cast('2020-01-01 02:00:00' as datetime) as date_hour union all + select 40 as id, cast('2020-01-01 02:00:00' as datetime) as date_hour + + {% endif %} + +) + +select * from data +""" + +_MODELS__INCREMENTAL_TIME_INGESTION_PARTITIONING_TARGET = """ +{{ + config( + materialized="incremental", + partition_by={ + "field": "date_hour", + "data_type": "datetime", + "granularity": "hour", + "time_ingestion_partitioning": true + } + ) +}} + +{% if not is_incremental() %} + + select 10 as id, cast('2020-01-01 01:00:00' as datetime) as date_hour union all + select 30 as id, cast('2020-01-01 02:00:00' as datetime) as date_hour + +{% else %} + + select 20 as id, cast('2020-01-01 01:00:00' as datetime) as date_hour union all + select 40 as id, cast('2020-01-01 02:00:00' as datetime) as date_hour + +{% endif %} +""" + 
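+# The class below extends BaseIncrementalOnSchemaChangeSetup (the setup class,
+# not the ready-made BaseIncrementalOnSchemaChange tests above) so it can add
+# two BigQuery-specific cases: a sync_all_columns model that pairs the
+# insert_overwrite strategy with a dynamic partition filter on
+# _dbt_max_partition, and a pair of models exercising time-ingestion
+# partitioning. Both tests use the inherited run_twice_and_assert helper,
+# which runs the selected models twice and compares the source relation
+# against its expected target.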
+class TestIncrementalOnSchemaChangeBigQuerySpecific(BaseIncrementalOnSchemaChangeSetup): + @pytest.fixture(scope="class") + def models(self): + return { + "model_a.sql": _MODELS__A, + "incremental_sync_all_columns_dynamic_insert_overwrite.sql": + _MODELS__INCREMENTAL_SYNC_ALL_COLUMNS_DYNAMIC_INSERT_OVERWRITE, + "incremental_sync_all_columns_target.sql": + _MODELS__INCREMENTAL_SYNC_ALL_COLUMNS_TARGET, + "incremental_time_ingestion_partitioning.sql": + _MODELS__INCREMENTAL_TIME_INGESTION_PARTITIONING, + "incremental_time_ingestion_partitioning_target.sql": + _MODELS__INCREMENTAL_TIME_INGESTION_PARTITIONING_TARGET, + } + + def test_run_incremental_sync_all_columns_dynamic_insert_overwrite(self, project): + select = 'model_a incremental_sync_all_columns_dynamic_insert_overwrite incremental_sync_all_columns_target' + compare_source = 'incremental_sync_all_columns_dynamic_insert_overwrite' + compare_target = 'incremental_sync_all_columns_target' + self.run_twice_and_assert(select, compare_source, compare_target, project) + + # TODO: this test was added here, but it doesn't actually use 'on_schema_change' + def test_run_incremental_time_ingestion_partitioning(self, project): + select = 'model_a incremental_time_ingestion_partitioning incremental_time_ingestion_partitioning_target' + compare_source = 'incremental_time_ingestion_partitioning' + compare_target = 'incremental_time_ingestion_partitioning_target' + self.run_twice_and_assert(select, compare_source, compare_target, project) diff --git a/tests/functional/adapter/test_incremental_predicates.py b/tests/functional/adapter/incremental/test_incremental_predicates.py similarity index 100% rename from tests/functional/adapter/test_incremental_predicates.py rename to tests/functional/adapter/incremental/test_incremental_predicates.py diff --git a/tests/functional/adapter/test_incremental_unique_id.py b/tests/functional/adapter/incremental/test_incremental_unique_id.py similarity index 100% rename from tests/functional/adapter/test_incremental_unique_id.py rename to tests/functional/adapter/incremental/test_incremental_unique_id.py diff --git a/tests/integration/incremental_schema_tests/models/incremental_append_new_columns.sql b/tests/integration/incremental_schema_tests/models/incremental_append_new_columns.sql deleted file mode 100644 index 351a397b9..000000000 --- a/tests/integration/incremental_schema_tests/models/incremental_append_new_columns.sql +++ /dev/null @@ -1,29 +0,0 @@ -{{ - config( - materialized='incremental', - unique_key='id', - on_schema_change='append_new_columns' - ) -}} - -{% set string_type = 'string' %} - -WITH source_data AS (SELECT * FROM {{ ref('model_a') }} ) - -{% if is_incremental() %} - -SELECT id, - cast(field1 as {{string_type}}) as field1, - cast(field2 as {{string_type}}) as field2, - cast(field3 as {{string_type}}) as field3, - cast(field4 as {{string_type}}) as field4 -FROM source_data WHERE id NOT IN (SELECT id from {{ this }} ) - -{% else %} - -SELECT id, - cast(field1 as {{string_type}}) as field1, - cast(field2 as {{string_type}}) as field2 -FROM source_data where id <= 3 - -{% endif %} \ No newline at end of file diff --git a/tests/integration/incremental_schema_tests/models/incremental_append_new_columns_remove_one.sql b/tests/integration/incremental_schema_tests/models/incremental_append_new_columns_remove_one.sql deleted file mode 100644 index 2ff6c6f48..000000000 --- a/tests/integration/incremental_schema_tests/models/incremental_append_new_columns_remove_one.sql +++ /dev/null @@ -1,28 +0,0 @@ -{{ - 
config( - materialized='incremental', - unique_key='id', - on_schema_change='append_new_columns' - ) -}} - -{% set string_type = 'string' %} - -WITH source_data AS (SELECT * FROM {{ ref('model_a') }} ) - -{% if is_incremental() %} - -SELECT id, - cast(field1 as {{string_type}}) as field1, - cast(field3 as {{string_type}}) as field3, - cast(field4 as {{string_type}}) as field4 -FROM source_data WHERE id NOT IN (SELECT id from {{ this }} ) - -{% else %} - -SELECT id, - cast(field1 as {{string_type}}) as field1, - cast(field2 as {{string_type}}) as field2 -FROM source_data where id <= 3 - -{% endif %} \ No newline at end of file diff --git a/tests/integration/incremental_schema_tests/models/incremental_append_new_columns_remove_one_target.sql b/tests/integration/incremental_schema_tests/models/incremental_append_new_columns_remove_one_target.sql deleted file mode 100644 index c70029d9b..000000000 --- a/tests/integration/incremental_schema_tests/models/incremental_append_new_columns_remove_one_target.sql +++ /dev/null @@ -1,19 +0,0 @@ -{{ - config(materialized='table') -}} - -{% set string_type = 'string' %} - -with source_data as ( - - select * from {{ ref('model_a') }} - -) - -select id, - cast(field1 as {{string_type}}) as field1, - cast(CASE WHEN id > 3 THEN NULL ELSE field2 END as {{string_type}}) AS field2, - cast(CASE WHEN id <= 3 THEN NULL ELSE field3 END as {{string_type}}) AS field3, - cast(CASE WHEN id <= 3 THEN NULL ELSE field4 END as {{string_type}}) AS field4 - -from source_data \ No newline at end of file diff --git a/tests/integration/incremental_schema_tests/models/incremental_append_new_columns_target.sql b/tests/integration/incremental_schema_tests/models/incremental_append_new_columns_target.sql deleted file mode 100644 index 8f65ed71a..000000000 --- a/tests/integration/incremental_schema_tests/models/incremental_append_new_columns_target.sql +++ /dev/null @@ -1,19 +0,0 @@ -{{ - config(materialized='table') -}} - -{% set string_type = 'string' %} - -with source_data as ( - - select * from {{ ref('model_a') }} - -) - -select id - ,cast(field1 as {{string_type}}) as field1 - ,cast(field2 as {{string_type}}) as field2 - ,cast(CASE WHEN id <= 3 THEN NULL ELSE field3 END as {{string_type}}) AS field3 - ,cast(CASE WHEN id <= 3 THEN NULL ELSE field4 END as {{string_type}}) AS field4 - -from source_data \ No newline at end of file diff --git a/tests/integration/incremental_schema_tests/models/incremental_fail.sql b/tests/integration/incremental_schema_tests/models/incremental_fail.sql deleted file mode 100644 index 590f5b56d..000000000 --- a/tests/integration/incremental_schema_tests/models/incremental_fail.sql +++ /dev/null @@ -1,19 +0,0 @@ -{{ - config( - materialized='incremental', - unique_key='id', - on_schema_change='fail' - ) -}} - -WITH source_data AS (SELECT * FROM {{ ref('model_a') }} ) - -{% if is_incremental() %} - -SELECT id, field1, field2 FROM source_data - -{% else %} - -SELECT id, field1, field3 FROm source_data - -{% endif %} \ No newline at end of file diff --git a/tests/integration/incremental_schema_tests/models/incremental_ignore.sql b/tests/integration/incremental_schema_tests/models/incremental_ignore.sql deleted file mode 100644 index 51dee6022..000000000 --- a/tests/integration/incremental_schema_tests/models/incremental_ignore.sql +++ /dev/null @@ -1,19 +0,0 @@ -{{ - config( - materialized='incremental', - unique_key='id', - on_schema_change='ignore' - ) -}} - -WITH source_data AS (SELECT * FROM {{ ref('model_a') }} ) - -{% if is_incremental() %} - 
-SELECT id, field1, field2, field3, field4 FROM source_data WHERE id NOT IN (SELECT id from {{ this }} ) - -{% else %} - -SELECT id, field1, field2 FROM source_data LIMIT 3 - -{% endif %} \ No newline at end of file diff --git a/tests/integration/incremental_schema_tests/models/incremental_ignore_target.sql b/tests/integration/incremental_schema_tests/models/incremental_ignore_target.sql deleted file mode 100644 index 92d4564e0..000000000 --- a/tests/integration/incremental_schema_tests/models/incremental_ignore_target.sql +++ /dev/null @@ -1,15 +0,0 @@ -{{ - config(materialized='table') -}} - -with source_data as ( - - select * from {{ ref('model_a') }} - -) - -select id - ,field1 - ,field2 - -from source_data \ No newline at end of file diff --git a/tests/integration/incremental_schema_tests/models/incremental_sync_all_columns.sql b/tests/integration/incremental_schema_tests/models/incremental_sync_all_columns.sql deleted file mode 100644 index 9c79d649c..000000000 --- a/tests/integration/incremental_schema_tests/models/incremental_sync_all_columns.sql +++ /dev/null @@ -1,31 +0,0 @@ -{{ - config( - materialized='incremental', - unique_key='id', - on_schema_change='sync_all_columns' - - ) -}} - -WITH source_data AS (SELECT * FROM {{ ref('model_a') }} ) - -{% set string_type = 'string' %} - -{% if is_incremental() %} - -SELECT id, - cast(field1 as {{string_type}}) as field1, - cast(field3 as {{string_type}}) as field3, -- to validate new fields - cast(field4 as {{string_type}}) AS field4 -- to validate new fields - -FROM source_data WHERE id NOT IN (SELECT id from {{ this }} ) - -{% else %} - -select id, - cast(field1 as {{string_type}}) as field1, - cast(field2 as {{string_type}}) as field2 - -from source_data where id <= 3 - -{% endif %} \ No newline at end of file diff --git a/tests/integration/incremental_schema_tests/models/incremental_sync_all_columns_dynamic_insert_overwrite.sql b/tests/integration/incremental_schema_tests/models/incremental_sync_all_columns_dynamic_insert_overwrite.sql deleted file mode 100644 index d82e683a3..000000000 --- a/tests/integration/incremental_schema_tests/models/incremental_sync_all_columns_dynamic_insert_overwrite.sql +++ /dev/null @@ -1,40 +0,0 @@ -{{ - config( - materialized='incremental', - unique_key='id', - on_schema_change='sync_all_columns', - partition_by={ - "field": "id", - "data_type": "int64", - "range": { - "start": 1, - "end": 6, - "interval": 1 - } - }, - incremental_strategy='insert_overwrite' - ) -}} - -WITH source_data AS (SELECT * FROM {{ ref('model_a') }} ) - -{% set string_type = 'string' %} - -{% if is_incremental() %} - -SELECT id, - cast(field1 as {{string_type}}) as field1, - cast(field3 as {{string_type}}) as field3, -- to validate new fields - cast(field4 as {{string_type}}) AS field4 -- to validate new fields - -FROM source_data WHERE id > _dbt_max_partition - -{% else %} - -select id, - cast(field1 as {{string_type}}) as field1, - cast(field2 as {{string_type}}) as field2 - -from source_data where id <= 3 - -{% endif %} \ No newline at end of file diff --git a/tests/integration/incremental_schema_tests/models/incremental_sync_all_columns_target.sql b/tests/integration/incremental_schema_tests/models/incremental_sync_all_columns_target.sql deleted file mode 100644 index 2fcd88121..000000000 --- a/tests/integration/incremental_schema_tests/models/incremental_sync_all_columns_target.sql +++ /dev/null @@ -1,20 +0,0 @@ -{{ - config(materialized='table') -}} - -with source_data as ( - - select * from {{ ref('model_a') }} - -) - 
-{% set string_type = 'string' %}
-
-select id
-       ,cast(field1 as {{string_type}}) as field1
-       --,field2
-       ,cast(case when id <= 3 then null else field3 end as {{string_type}}) as field3
-       ,cast(case when id <= 3 then null else field4 end as {{string_type}}) as field4
-
-from source_data
-order by id
\ No newline at end of file
diff --git a/tests/integration/incremental_schema_tests/models/incremental_time_ingestion_partitioning.sql b/tests/integration/incremental_schema_tests/models/incremental_time_ingestion_partitioning.sql
deleted file mode 100644
index ce064b33c..000000000
--- a/tests/integration/incremental_schema_tests/models/incremental_time_ingestion_partitioning.sql
+++ /dev/null
@@ -1,38 +0,0 @@
-
-{{
-    config(
-        materialized="incremental",
-        incremental_strategy='insert_overwrite',
-        partition_by={
-            "field": "date_hour",
-            "data_type": "datetime",
-            "granularity": "hour",
-            "time_ingestion_partitioning": true
-        }
-    )
-}}
-
-
-with data as (
-
-    {% if not is_incremental() %}
-
-        select 1 as id, cast('2020-01-01 01:00:00' as datetime) as date_hour union all
-        select 2 as id, cast('2020-01-01 01:00:00' as datetime) as date_hour union all
-        select 3 as id, cast('2020-01-01 01:00:00' as datetime) as date_hour union all
-        select 4 as id, cast('2020-01-01 01:00:00' as datetime) as date_hour
-
-    {% else %}
-
-        -- we want to overwrite the 4 records in the 2020-01-01 01:00:00 partition
-        -- with the 2 records below, but add two more in the 2020-01-00 02:00:00 partition
-        select 10 as id, cast('2020-01-01 01:00:00' as datetime) as date_hour union all
-        select 20 as id, cast('2020-01-01 01:00:00' as datetime) as date_hour union all
-        select 30 as id, cast('2020-01-01 02:00:00' as datetime) as date_hour union all
-        select 40 as id, cast('2020-01-01 02:00:00' as datetime) as date_hour
-
-    {% endif %}
-
-)
-
-select * from data
diff --git a/tests/integration/incremental_schema_tests/models/incremental_time_ingestion_partitioning_target.sql b/tests/integration/incremental_schema_tests/models/incremental_time_ingestion_partitioning_target.sql
deleted file mode 100644
index 2f25229de..000000000
--- a/tests/integration/incremental_schema_tests/models/incremental_time_ingestion_partitioning_target.sql
+++ /dev/null
@@ -1,24 +0,0 @@
-
-{{
-    config(
-        materialized="incremental",
-        partition_by={
-            "field": "date_hour",
-            "data_type": "datetime",
-            "granularity": "hour",
-            "time_ingestion_partitioning": true
-        }
-    )
-}}
-
-{% if not is_incremental() %}
-
-    select 10 as id, cast('2020-01-01 01:00:00' as datetime) as date_hour union all
-    select 30 as id, cast('2020-01-01 02:00:00' as datetime) as date_hour
-
-{% else %}
-
-    select 20 as id, cast('2020-01-01 01:00:00' as datetime) as date_hour union all
-    select 40 as id, cast('2020-01-01 02:00:00' as datetime) as date_hour
-
-{% endif %}
\ No newline at end of file
diff --git a/tests/integration/incremental_schema_tests/models/model_a.sql b/tests/integration/incremental_schema_tests/models/model_a.sql
deleted file mode 100644
index 2a0b2ddaf..000000000
--- a/tests/integration/incremental_schema_tests/models/model_a.sql
+++ /dev/null
@@ -1,22 +0,0 @@
-{{
-    config(materialized='table')
-}}
-
-with source_data as (
-
-    select 1 as id, 'aaa' as field1, 'bbb' as field2, 111 as field3, 'TTT' as field4
-    union all select 2 as id, 'ccc' as field1, 'ddd' as field2, 222 as field3, 'UUU' as field4
-    union all select 3 as id, 'eee' as field1, 'fff' as field2, 333 as field3, 'VVV' as field4
-    union all select 4 as id, 'ggg' as field1, 'hhh' as field2, 444 as field3, 'WWW' as field4
-    union all select 5 as id, 'iii' as field1, 'jjj' as field2, 555 as field3, 'XXX' as field4
-    union all select 6 as id, 'kkk' as field1, 'lll' as field2, 666 as field3, 'YYY' as field4
-
-)
-
-select id
-       ,field1
-       ,field2
-       ,field3
-       ,field4
-
-from source_data
\ No newline at end of file
diff --git a/tests/integration/incremental_schema_tests/models/schema.yml b/tests/integration/incremental_schema_tests/models/schema.yml
deleted file mode 100644
index fd3136102..000000000
--- a/tests/integration/incremental_schema_tests/models/schema.yml
+++ /dev/null
@@ -1,80 +0,0 @@
-version: 2
-
-models:
-  - name: model_a
-    columns:
-      - name: id
-        tags: [column_level_tag]
-        tests:
-          - unique
-
-  - name: incremental_ignore
-    columns:
-      - name: id
-        tags: [column_level_tag]
-        tests:
-          - unique
-
-  - name: incremental_ignore_target
-    columns:
-      - name: id
-        tags: [column_level_tag]
-        tests:
-          - unique
-
-  - name: incremental_append_new_columns
-    columns:
-      - name: id
-        tags: [column_level_tag]
-        tests:
-          - unique
-
-  - name: incremental_append_new_columns_target
-    columns:
-      - name: id
-        tags: [column_level_tag]
-        tests:
-          - unique
-
-  - name: incremental_append_new_columns_remove_one
-    columns:
-      - name: id
-        tags: [column_level_tag]
-        tests:
-          - unique
-
-  - name: incremental_append_new_columns_remove_one_target
-    columns:
-      - name: id
-        tags: [column_level_tag]
-        tests:
-          - unique
-
-  - name: incremental_sync_all_columns
-    columns:
-      - name: id
-        tags: [column_level_tag]
-        tests:
-          - unique
-
-  - name: incremental_sync_all_columns_target
-    columns:
-      - name: id
-        tags: [column_leveL_tag]
-        tests:
-          - unique
-
-  - name: incremental_time_ingestion_partitioning
-    columns:
-      - name: id
-        tags: [column_level_tag]
-        tests:
-          - unique
-
-  - name: incremental_time_ingestion_partitioning_target
-    columns:
-      - name: id
-        tags: [column_level_tag]
-        tests:
-          - unique
-
diff --git a/tests/integration/incremental_schema_tests/test_incremental_schema.py b/tests/integration/incremental_schema_tests/test_incremental_schema.py
deleted file mode 100644
index 9592c500f..000000000
--- a/tests/integration/incremental_schema_tests/test_incremental_schema.py
+++ /dev/null
@@ -1,162 +0,0 @@
-from tests.integration.base import DBTIntegrationTest, use_profile
-
-
-class TestSelectionExpansion(DBTIntegrationTest):
-    @property
-    def schema(self):
-        return "test_incremental_schema"
-
-    @property
-    def models(self):
-        return "models"
-
-    @property
-    def project_config(self):
-        return {
-            "config-version": 2,
-            "test-paths": ["tests"]
-        }
-
-    def list_tests_and_assert(self, include, exclude, expected_tests):
-        list_args = ['ls', '--resource-type', 'test']
-        if include:
-            list_args.extend(('--select', include))
-        if exclude:
-            list_args.extend(('--exclude', exclude))
-        listed = self.run_dbt(list_args)
-        print(listed)
-        assert len(listed) == len(expected_tests)
-        test_names = [name.split('.')[-1] for name in listed]
-        assert sorted(test_names) == sorted(expected_tests)
-
-    def run_tests_and_assert(
-        self, include, exclude, expected_tests, compare_source, compare_target
-    ):
-
-        run_args = ['run']
-        if include:
-            run_args.extend(('--models', include))
-        results_one = self.run_dbt(run_args)
-        results_two = self.run_dbt(run_args)
-
-        self.assertEqual(len(results_one), 3)
-        self.assertEqual(len(results_two), 3)
-
-        test_args = ['test']
-        if include:
-            test_args.extend(('--models', include))
-        if exclude:
-            test_args.extend(('--exclude', exclude))
-
-        results = self.run_dbt(test_args)
-        tests_run = [r.node.name for r in results]
-        assert len(tests_run) == len(expected_tests)
-        assert sorted(tests_run) == sorted(expected_tests)
-        self.assertTablesEqual(compare_source, compare_target)
-
-    def run_incremental_ignore(self):
-        select = 'model_a incremental_ignore incremental_ignore_target'
-        compare_source = 'incremental_ignore'
-        compare_target = 'incremental_ignore_target'
-        exclude = None
-        expected = [
-            'select_from_a',
-            'select_from_incremental_ignore',
-            'select_from_incremental_ignore_target',
-            'unique_model_a_id',
-            'unique_incremental_ignore_id',
-            'unique_incremental_ignore_target_id'
-        ]
-
-        self.list_tests_and_assert(select, exclude, expected)
-        self.run_tests_and_assert(select, exclude, expected, compare_source, compare_target)
-
-    def run_incremental_append_new_columns(self):
-        select = 'model_a incremental_append_new_columns incremental_append_new_columns_target'
-        compare_source = 'incremental_append_new_columns'
-        compare_target = 'incremental_append_new_columns_target'
-        exclude = None
-        expected = [
-            'select_from_a',
-            'select_from_incremental_append_new_columns',
-            'select_from_incremental_append_new_columns_target',
-            'unique_model_a_id',
-            'unique_incremental_append_new_columns_id',
-            'unique_incremental_append_new_columns_target_id'
-        ]
-        self.list_tests_and_assert(select, exclude, expected)
-        self.run_tests_and_assert(select, exclude, expected, compare_source, compare_target)
-
-    def run_incremental_append_new_columns_remove_one(self):
-        select = 'model_a incremental_append_new_columns_remove_one incremental_append_new_columns_remove_one_target'
-        compare_source = 'incremental_append_new_columns_remove_one'
-        compare_target = 'incremental_append_new_columns_remove_one_target'
-        exclude = None
-        expected = [
-            'select_from_a',
-            'select_from_incremental_append_new_columns_remove_one',
-            'select_from_incremental_append_new_columns_remove_one_target',
-            'unique_model_a_id',
-            'unique_incremental_append_new_columns_remove_one_id',
-            'unique_incremental_append_new_columns_remove_one_target_id'
-        ]
-        self.run_tests_and_assert(select, exclude, expected, compare_source, compare_target)
-
-    def run_incremental_sync_all_columns(self):
-        select = 'model_a incremental_sync_all_columns incremental_sync_all_columns_target'
-        compare_source = 'incremental_sync_all_columns'
-        compare_target = 'incremental_sync_all_columns_target'
-        exclude = None
-        expected = [
-            'select_from_a',
-            'select_from_incremental_sync_all_columns',
-            'select_from_incremental_sync_all_columns_target',
-            'unique_model_a_id',
-            'unique_incremental_sync_all_columns_id',
-            'unique_incremental_sync_all_columns_target_id'
-        ]
-        self.list_tests_and_assert(select, exclude, expected)
-        self.run_tests_and_assert(select, exclude, expected, compare_source, compare_target)
-
-    def run_incremental_fail_on_schema_change(self):
-        select = 'model_a incremental_fail'
-        results_one = self.run_dbt(['run', '--models', select, '--full-refresh'])
-        results_two = self.run_dbt(['run', '--models', select], expect_pass = False)
-        self.assertIn('Compilation Error', results_two[1].message)
-
-    def run_incremental_time_ingestion_partitioning(self):
-        select = 'model_a incremental_time_ingestion_partitioning incremental_time_ingestion_partitioning_target'
-        compare_source = 'incremental_time_ingestion_partitioning'
-        compare_target = 'incremental_time_ingestion_partitioning_target'
-        exclude = None
-        expected = [
-            'select_from_a',
-            'select_from_incremental_time_ingestion_partitioning',
-            'select_from_incremental_time_ingestion_partitioning_target',
-            'unique_model_a_id',
-            'unique_incremental_time_ingestion_partitioning_id',
-            'unique_incremental_time_ingestion_partitioning_target_id'
-        ]
-        self.list_tests_and_assert(select, exclude, expected)
-        self.run_tests_and_assert(select, exclude, expected, compare_source, compare_target)
-
-    @use_profile('bigquery')
-    def test__bigquery__run_incremental_ignore(self):
-        self.run_incremental_ignore()
-
-    @use_profile('bigquery')
-    def test__bigquery__run_incremental_append_new_columns(self):
-        self.run_incremental_append_new_columns()
-        self.run_incremental_append_new_columns_remove_one()
-
-    @use_profile('bigquery')
-    def test__bigquery__run_incremental_sync_all_columns(self):
-        self.run_incremental_sync_all_columns()
-
-    @use_profile('bigquery')
-    def test__bigquery__run_incremental_fail_on_schema_change(self):
-        self.run_incremental_fail_on_schema_change()
-
-    @use_profile('bigquery')
-    def test__bigquery__run_incremental_time_ingestion_partitioning(self):
-        self.run_incremental_time_ingestion_partitioning()
\ No newline at end of file
diff --git a/tests/integration/incremental_schema_tests/tests/select_from_a.sql b/tests/integration/incremental_schema_tests/tests/select_from_a.sql
deleted file mode 100644
index 3dc8f2857..000000000
--- a/tests/integration/incremental_schema_tests/tests/select_from_a.sql
+++ /dev/null
@@ -1 +0,0 @@
-select * from {{ ref('model_a') }} where false
diff --git a/tests/integration/incremental_schema_tests/tests/select_from_incremental_append_new_columns.sql b/tests/integration/incremental_schema_tests/tests/select_from_incremental_append_new_columns.sql
deleted file mode 100644
index 947e84588..000000000
--- a/tests/integration/incremental_schema_tests/tests/select_from_incremental_append_new_columns.sql
+++ /dev/null
@@ -1 +0,0 @@
-select * from {{ ref('incremental_append_new_columns') }} where false
\ No newline at end of file
diff --git a/tests/integration/incremental_schema_tests/tests/select_from_incremental_append_new_columns_remove_one.sql b/tests/integration/incremental_schema_tests/tests/select_from_incremental_append_new_columns_remove_one.sql
deleted file mode 100644
index 06d52c6d6..000000000
--- a/tests/integration/incremental_schema_tests/tests/select_from_incremental_append_new_columns_remove_one.sql
+++ /dev/null
@@ -1 +0,0 @@
-select * from {{ ref('incremental_append_new_columns_remove_one') }} where false
\ No newline at end of file
diff --git a/tests/integration/incremental_schema_tests/tests/select_from_incremental_append_new_columns_remove_one_target.sql b/tests/integration/incremental_schema_tests/tests/select_from_incremental_append_new_columns_remove_one_target.sql
deleted file mode 100644
index 07d2412b0..000000000
--- a/tests/integration/incremental_schema_tests/tests/select_from_incremental_append_new_columns_remove_one_target.sql
+++ /dev/null
@@ -1 +0,0 @@
-select * from {{ ref('incremental_append_new_columns_remove_one_target') }} where false
\ No newline at end of file
diff --git a/tests/integration/incremental_schema_tests/tests/select_from_incremental_append_new_columns_target.sql b/tests/integration/incremental_schema_tests/tests/select_from_incremental_append_new_columns_target.sql
deleted file mode 100644
index 8b86eddd7..000000000
--- a/tests/integration/incremental_schema_tests/tests/select_from_incremental_append_new_columns_target.sql
+++ /dev/null
@@ -1 +0,0 @@
-select * from {{ ref('incremental_append_new_columns_target') }} where false
\ No newline at end of file
diff --git a/tests/integration/incremental_schema_tests/tests/select_from_incremental_ignore.sql b/tests/integration/incremental_schema_tests/tests/select_from_incremental_ignore.sql
deleted file mode 100644
index d565c8464..000000000
--- a/tests/integration/incremental_schema_tests/tests/select_from_incremental_ignore.sql
+++ /dev/null
@@ -1 +0,0 @@
-select * from {{ ref('incremental_ignore') }} where false
diff --git a/tests/integration/incremental_schema_tests/tests/select_from_incremental_ignore_target.sql b/tests/integration/incremental_schema_tests/tests/select_from_incremental_ignore_target.sql
deleted file mode 100644
index 35d535c5c..000000000
--- a/tests/integration/incremental_schema_tests/tests/select_from_incremental_ignore_target.sql
+++ /dev/null
@@ -1 +0,0 @@
-select * from {{ ref('incremental_ignore_target') }} where false
\ No newline at end of file
diff --git a/tests/integration/incremental_schema_tests/tests/select_from_incremental_sync_all_columns.sql b/tests/integration/incremental_schema_tests/tests/select_from_incremental_sync_all_columns.sql
deleted file mode 100644
index aedc9f803..000000000
--- a/tests/integration/incremental_schema_tests/tests/select_from_incremental_sync_all_columns.sql
+++ /dev/null
@@ -1 +0,0 @@
-select * from {{ ref('incremental_sync_all_columns') }} where false
\ No newline at end of file
diff --git a/tests/integration/incremental_schema_tests/tests/select_from_incremental_sync_all_columns_target.sql b/tests/integration/incremental_schema_tests/tests/select_from_incremental_sync_all_columns_target.sql
deleted file mode 100644
index 4b703c988..000000000
--- a/tests/integration/incremental_schema_tests/tests/select_from_incremental_sync_all_columns_target.sql
+++ /dev/null
@@ -1 +0,0 @@
-select * from {{ ref('incremental_sync_all_columns_target') }} where false
\ No newline at end of file
diff --git a/tests/integration/incremental_schema_tests/tests/select_from_incremental_time_ingestion_partitioning.sql b/tests/integration/incremental_schema_tests/tests/select_from_incremental_time_ingestion_partitioning.sql
deleted file mode 100644
index 85e653c11..000000000
--- a/tests/integration/incremental_schema_tests/tests/select_from_incremental_time_ingestion_partitioning.sql
+++ /dev/null
@@ -1 +0,0 @@
-select * from {{ ref('incremental_time_ingestion_partitioning') }} where false
diff --git a/tests/integration/incremental_schema_tests/tests/select_from_incremental_time_ingestion_partitioning_target.sql b/tests/integration/incremental_schema_tests/tests/select_from_incremental_time_ingestion_partitioning_target.sql
deleted file mode 100644
index e2533dff7..000000000
--- a/tests/integration/incremental_schema_tests/tests/select_from_incremental_time_ingestion_partitioning_target.sql
+++ /dev/null
@@ -1 +0,0 @@
-select * from {{ ref('incremental_time_ingestion_partitioning_target') }} where false

From 38fb796bf213217220ed4bcc6ce237b0b25d507c Mon Sep 17 00:00:00 2001
From: Mila Page <67295367+VersusFacit@users.noreply.github.com>
Date: Thu, 26 Jan 2023 01:19:33 -0800
Subject: [PATCH 08/16] Remove test for CT-1629 in dbt-core. (#473)

Co-authored-by: Mila Page
---
 .../column_quoting/models-unquoted/model.sql  | 12 ---
 .../column_quoting/models/model.sql           | 12 ---
 .../integration/column_quoting/seeds/seed.csv |  4 -
 .../column_quoting/test_column_quotes.py      | 78 -------------------
 4 files changed, 106 deletions(-)
 delete mode 100644 tests/integration/column_quoting/models-unquoted/model.sql
 delete mode 100644 tests/integration/column_quoting/models/model.sql
 delete mode 100644 tests/integration/column_quoting/seeds/seed.csv
 delete mode 100644 tests/integration/column_quoting/test_column_quotes.py

diff --git a/tests/integration/column_quoting/models-unquoted/model.sql b/tests/integration/column_quoting/models-unquoted/model.sql
deleted file mode 100644
index a7fefd127..000000000
--- a/tests/integration/column_quoting/models-unquoted/model.sql
+++ /dev/null
@@ -1,12 +0,0 @@
-{% set col_a = '`col_a`' %}
-{% set col_b = '`col_b`' %}
-
-{{config(
-    materialized = 'incremental',
-    unique_key = col_a,
-    incremental_strategy = var('strategy')
-    )}}
-
-select
-{{ col_a }}, {{ col_b }}
-from {{ref('seed')}}
diff --git a/tests/integration/column_quoting/models/model.sql b/tests/integration/column_quoting/models/model.sql
deleted file mode 100644
index 6db19b06d..000000000
--- a/tests/integration/column_quoting/models/model.sql
+++ /dev/null
@@ -1,12 +0,0 @@
-{% set col_a = '`col_A`' %}
-{% set col_b = '`col_B`' %}
-
-{{config(
-    materialized = 'incremental',
-    unique_key = col_a,
-    incremental_strategy = var('strategy')
-    )}}
-
-select
-{{ col_a }}, {{ col_b }}
-from {{ref('seed')}}
diff --git a/tests/integration/column_quoting/seeds/seed.csv b/tests/integration/column_quoting/seeds/seed.csv
deleted file mode 100644
index d4a1e26ee..000000000
--- a/tests/integration/column_quoting/seeds/seed.csv
+++ /dev/null
@@ -1,4 +0,0 @@
-col_A,col_B
-1,2
-3,4
-5,6
diff --git a/tests/integration/column_quoting/test_column_quotes.py b/tests/integration/column_quoting/test_column_quotes.py
deleted file mode 100644
index c484317ac..000000000
--- a/tests/integration/column_quoting/test_column_quotes.py
+++ /dev/null
@@ -1,78 +0,0 @@
-from tests.integration.base import DBTIntegrationTest, use_profile
-import os
-
-
-class BaseColumnQuotingTest(DBTIntegrationTest):
-    def column_quoting(self):
-        raise NotImplementedError('column_quoting not implemented')
-
-    @property
-    def schema(self):
-        return 'dbt_column_quoting'
-
-    @staticmethod
-    def dir(value):
-        return os.path.normpath(value)
-
-    def _run_columnn_quotes(self, strategy='delete+insert'):
-        strategy_vars = '{{"strategy": "{}"}}'.format(strategy)
-        self.run_dbt(['seed', '--vars', strategy_vars])
-        self.run_dbt(['run', '--vars', strategy_vars])
-        self.run_dbt(['run', '--vars', strategy_vars])
-
-
-class TestColumnQuotingDefault(BaseColumnQuotingTest):
-    @property
-    def project_config(self):
-        return {
-            'config-version': 2
-        }
-
-    @property
-    def models(self):
-        return self.dir('models')
-
-    def run_dbt(self, *args, **kwargs):
-        return super().run_dbt(*args, **kwargs)
-
-    @use_profile('bigquery')
-    def test_bigquery_column_quotes(self):
-        self._run_columnn_quotes(strategy='merge')
-
-
-class TestColumnQuotingDisabled(BaseColumnQuotingTest):
-    @property
-    def models(self):
-        return self.dir('models-unquoted')
-
-    @property
-    def project_config(self):
-        return {
-            'config-version': 2,
-            'seeds': {
-                'quote_columns': False,
-            },
-        }
-
-    @use_profile('bigquery')
-    def test_bigquery_column_quotes_merged(self):
-        self._run_columnn_quotes(strategy='merge')
-
-
-class TestColumnQuotingEnabled(BaseColumnQuotingTest):
-    @property
-    def models(self):
-        return self.dir('models')
-
-    @property
-    def project_config(self):
-        return {
-            'config-version': 2,
-            'seeds': {
-                'quote_columns': True,
-            },
-        }
-
-    @use_profile('bigquery')
-    def test_bigquery_column_quotes_merged(self):
-        self._run_columnn_quotes(strategy='merge')

From e4ce8c1abf097d78f5a8b2709fa1844c9d85a579 Mon Sep 17 00:00:00 2001
From: colin-rogers-dbt <111200756+colin-rogers-dbt@users.noreply.github.com>
Date: Thu, 26 Jan 2023 11:19:25 -0800
Subject: [PATCH 09/16] remove sources integration tests (#479)

---
 .../sources_test/error_models/model.sql       |   1 -
 .../sources_test/error_models/schema.yml      |  12 --
 .../sources_test/filtered_models/schema.yml   |  18 --
 .../integration/sources_test/macros/macro.sql |  14 --
 .../malformed_models/descendant_model.sql     |   1 -
 .../sources_test/malformed_models/schema.yml  |  14 --
 .../malformed_schema_tests/model.sql          |   1 -
 .../malformed_schema_tests/schema.yml         |  14 --
 .../sources_test/models/descendant_model.sql  |   1 -
 .../sources_test/models/ephemeral_model.sql   |   3 -
 .../models/multi_source_model.sql             |   2 -
 .../models/nonsource_descendant.sql           |   1 -
 .../sources_test/models/schema.yml            |  77 --------
 .../sources_test/models/view_model.sql        |   3 -
 tests/integration/sources_test/seed.sql       | 113 -----------
 .../seeds/expected_multi_source.csv           |   4 -
 .../sources_test/seeds/other_source_table.csv |   4 -
 .../sources_test/seeds/other_table.csv        |   4 -
 .../integration/sources_test/seeds/source.csv | 101 ----------
 .../integration/sources_test/test_sources.py  | 183 ------------------
 20 files changed, 571 deletions(-)
 delete mode 100644 tests/integration/sources_test/error_models/model.sql
 delete mode 100644 tests/integration/sources_test/error_models/schema.yml
 delete mode 100644 tests/integration/sources_test/filtered_models/schema.yml
 delete mode 100644 tests/integration/sources_test/macros/macro.sql
 delete mode 100644 tests/integration/sources_test/malformed_models/descendant_model.sql
 delete mode 100644 tests/integration/sources_test/malformed_models/schema.yml
 delete mode 100644 tests/integration/sources_test/malformed_schema_tests/model.sql
 delete mode 100644 tests/integration/sources_test/malformed_schema_tests/schema.yml
 delete mode 100644 tests/integration/sources_test/models/descendant_model.sql
 delete mode 100644 tests/integration/sources_test/models/ephemeral_model.sql
 delete mode 100644 tests/integration/sources_test/models/multi_source_model.sql
 delete mode 100644 tests/integration/sources_test/models/nonsource_descendant.sql
 delete mode 100644 tests/integration/sources_test/models/schema.yml
 delete mode 100644 tests/integration/sources_test/models/view_model.sql
 delete mode 100644 tests/integration/sources_test/seed.sql
 delete mode 100644 tests/integration/sources_test/seeds/expected_multi_source.csv
 delete mode 100644 tests/integration/sources_test/seeds/other_source_table.csv
 delete mode 100644 tests/integration/sources_test/seeds/other_table.csv
 delete mode 100644 tests/integration/sources_test/seeds/source.csv
 delete mode 100644 tests/integration/sources_test/test_sources.py

diff --git a/tests/integration/sources_test/error_models/model.sql b/tests/integration/sources_test/error_models/model.sql
deleted file mode 100644
index 55bbcba67..000000000
--- a/tests/integration/sources_test/error_models/model.sql
+++ /dev/null
@@ -1 +0,0 @@
-select * from {{ source('test_source', 'test_table') }}
diff --git a/tests/integration/sources_test/error_models/schema.yml b/tests/integration/sources_test/error_models/schema.yml
deleted file mode 100644
index 69cf1f304..000000000
--- a/tests/integration/sources_test/error_models/schema.yml
+++ /dev/null
@@ -1,12 +0,0 @@
-version: 2
-sources:
-  - name: test_source
-    loader: custom
-    freshness:
-      warn_after: {count: 10, period: hour}
-      error_after: {count: 1, period: day}
-    schema: invalid
-    tables:
-      - name: test_table
-        identifier: source
-        loaded_at_field: updated_at
diff --git a/tests/integration/sources_test/filtered_models/schema.yml b/tests/integration/sources_test/filtered_models/schema.yml
deleted file mode 100644
index edad7f6ec..000000000
--- a/tests/integration/sources_test/filtered_models/schema.yml
+++ /dev/null
@@ -1,18 +0,0 @@
-version: 2
-sources:
-  - name: test_source
-    loader: custom
-    freshness:
-      warn_after: {count: 10, period: hour}
-      error_after: {count: 1, period: day}
-      filter: id > 1
-    schema: "{{ var(env_var('DBT_TEST_SCHEMA_NAME_VARIABLE')) }}"
-    quoting:
-      identifier: True
-    tables:
-      - name: test_table
-        identifier: source
-        loaded_at_field: updated_at
-        freshness:
-          error_after: {count: 18, period: hour}
-        filter: id > 101
diff --git a/tests/integration/sources_test/macros/macro.sql b/tests/integration/sources_test/macros/macro.sql
deleted file mode 100644
index a607a6e4c..000000000
--- a/tests/integration/sources_test/macros/macro.sql
+++ /dev/null
@@ -1,14 +0,0 @@
-{% macro override_me() -%}
-    {{ exceptions.raise_compiler_error('this is a bad macro') }}
-{%- endmacro %}
-
-{% macro happy_little_macro() -%}
-    {{ override_me() }}
-{%- endmacro %}
-
-
-{% macro vacuum_source(source_name, table_name) -%}
-    {% call statement('stmt', auto_begin=false, fetch_result=false) %}
-        vacuum {{ source(source_name, table_name) }}
-    {% endcall %}
-{%- endmacro %}
diff --git a/tests/integration/sources_test/malformed_models/descendant_model.sql b/tests/integration/sources_test/malformed_models/descendant_model.sql
deleted file mode 100644
index 55bbcba67..000000000
--- a/tests/integration/sources_test/malformed_models/descendant_model.sql
+++ /dev/null
@@ -1 +0,0 @@
-select * from {{ source('test_source', 'test_table') }}
diff --git a/tests/integration/sources_test/malformed_models/schema.yml b/tests/integration/sources_test/malformed_models/schema.yml
deleted file mode 100644
index 544d18d65..000000000
--- a/tests/integration/sources_test/malformed_models/schema.yml
+++ /dev/null
@@ -1,14 +0,0 @@
-version: 2
-sources:
-  - name: test_source
-    loader: custom
-    schema: "{{ var('test_run_schema') }}"
-    tables:
-      - name: test_table
-        identifier: source
-        tests:
-          - relationships:
-            # this is invalid (list of 3 1-key dicts instead of a single 3-key dict)
-            - column_name: favorite_color
-            - to: ref('descendant_model')
-            - field: favorite_color
diff --git a/tests/integration/sources_test/malformed_schema_tests/model.sql b/tests/integration/sources_test/malformed_schema_tests/model.sql
deleted file mode 100644
index 55bbcba67..000000000
--- a/tests/integration/sources_test/malformed_schema_tests/model.sql
+++ /dev/null
@@ -1 +0,0 @@
-select * from {{ source('test_source', 'test_table') }}
diff --git a/tests/integration/sources_test/malformed_schema_tests/schema.yml b/tests/integration/sources_test/malformed_schema_tests/schema.yml
deleted file mode 100644
index d72ab2eee..000000000
--- a/tests/integration/sources_test/malformed_schema_tests/schema.yml
+++ /dev/null
@@ -1,14 +0,0 @@
-version: 2
-sources:
-  - name: test_source
-    schema: "{{ var('test_run_schema') }}"
-    tables:
-      - name: test_table
-        identifier: source
-        columns:
-          - name: favorite_color
-            tests:
-              - relationships:
-                  to: ref('model')
-                  # this will get rendered as its literal
-                  field: "{{ 'favorite' ~ 'color' }}"
diff --git a/tests/integration/sources_test/models/descendant_model.sql b/tests/integration/sources_test/models/descendant_model.sql
deleted file mode 100644
index 55bbcba67..000000000
--- a/tests/integration/sources_test/models/descendant_model.sql
+++ /dev/null
@@ -1 +0,0 @@
-select * from {{ source('test_source', 'test_table') }}
diff --git a/tests/integration/sources_test/models/ephemeral_model.sql b/tests/integration/sources_test/models/ephemeral_model.sql
deleted file mode 100644
index 8de35cd3e..000000000
--- a/tests/integration/sources_test/models/ephemeral_model.sql
+++ /dev/null
@@ -1,3 +0,0 @@
-{{ config(materialized='ephemeral') }}
-
-select 1 as id
diff --git a/tests/integration/sources_test/models/multi_source_model.sql b/tests/integration/sources_test/models/multi_source_model.sql
deleted file mode 100644
index e310206b0..000000000
--- a/tests/integration/sources_test/models/multi_source_model.sql
+++ /dev/null
@@ -1,2 +0,0 @@
-select * from {{ source('test_source', 'other_test_table')}}
-    join {{ source('other_source', 'test_table')}} using (id)
diff --git a/tests/integration/sources_test/models/nonsource_descendant.sql b/tests/integration/sources_test/models/nonsource_descendant.sql
deleted file mode 100644
index 97f2151c7..000000000
--- a/tests/integration/sources_test/models/nonsource_descendant.sql
+++ /dev/null
@@ -1 +0,0 @@
-select * from {{ schema }}.source
diff --git a/tests/integration/sources_test/models/schema.yml b/tests/integration/sources_test/models/schema.yml
deleted file mode 100644
index f02eb1345..000000000
--- a/tests/integration/sources_test/models/schema.yml
+++ /dev/null
@@ -1,77 +0,0 @@
-version: 2
-models:
-  - name: descendant_model
-    columns:
-      - name: favorite_color
-        tests:
-          - relationships:
-              to: source('test_source', 'test_table')
-              field: favorite_color
-
-sources:
-  - name: test_source
-    loader: custom
-    freshness:
-      warn_after: {count: 10, period: hour}
-      error_after: {count: 1, period: day}
-    schema: "{{ var(env_var('DBT_TEST_SCHEMA_NAME_VARIABLE')) }}"
-    quoting:
-      identifier: True
-    tags:
-      - my_test_source_tag
-    tables:
-      - name: test_table
-        identifier: source
-        loaded_at_field: "{{ var('test_loaded_at') | as_text }}"
-        freshness:
-          error_after: {count: 18, period: hour}
-        tags:
-          - my_test_source_table_tag
-        columns:
-          - name: favorite_color
-            description: The favorite color
-          - name: id
-            description: The user ID
-            tests:
-              - unique
-              - not_null
-            tags:
-              - id_column
-          - name: first_name
-            description: The first name of the user
-            tests: []
-          - name: email
-            description: The email address of the user
-          - name: ip_address
-            description: The last IP address the user logged in from
-          - name: updated_at
-            description: The last update time for this user
-        tests:
-          - relationships:
-              # do this as a table-level test, just to test out that aspect
-              column_name: favorite_color
-              to: ref('descendant_model')
-              field: favorite_color
-      - name: other_test_table
-        identifier: other_table
-        columns:
-          - name: id
-            tests:
-              - not_null
-              - unique
-            tags:
-              - id_column
-      - name: disabled_test_table
-        freshness: null
-        loaded_at_field: "{{ var('test_loaded_at') | as_text }}"
-  - name: other_source
-    schema: "{{ var('test_run_schema') }}"
-    quoting:
-      identifier: True
-    tables:
-      - name: test_table
-        identifier: other_source_table
-  - name: external_source
-    schema: "{{ var('test_run_alt_schema', var('test_run_schema')) }}"
-    tables:
-      - name: table
diff --git a/tests/integration/sources_test/models/view_model.sql b/tests/integration/sources_test/models/view_model.sql
deleted file mode 100644
index ecb330804..000000000
--- a/tests/integration/sources_test/models/view_model.sql
+++ /dev/null
@@ -1,3 +0,0 @@
-{# See here: https://github.com/dbt-labs/dbt/pull/1729 #}
-
-select * from {{ ref('ephemeral_model') }}
diff --git a/tests/integration/sources_test/seed.sql b/tests/integration/sources_test/seed.sql
deleted file mode 100644
index 40110b990..000000000
--- a/tests/integration/sources_test/seed.sql
+++ /dev/null
@@ -1,113 +0,0 @@
-create table {schema}.seed_expected (
-    favorite_color TEXT,
-    id INTEGER,
-    first_name TEXT,
-    email TEXT,
-    ip_address TEXT,
-    updated_at TIMESTAMP WITHOUT TIME ZONE
-);
-
-
-INSERT INTO {schema}.seed_expected
-    ("favorite_color","id","first_name","email","ip_address","updated_at")
-VALUES
-    ('blue',1,'Larry','lking0@miitbeian.gov.cn','''69.135.206.194''','2008-09-12 19:08:31'),
-    ('blue',2,'Larry','lperkins1@toplist.cz','''64.210.133.162''','1978-05-09 04:15:14'),
-    ('blue',3,'Anna','amontgomery2@miitbeian.gov.cn','''168.104.64.114''','2011-10-16 04:07:57'),
-    ('blue',4,'Sandra','sgeorge3@livejournal.com','''229.235.252.98''','1973-07-19 10:52:43'),
-    ('blue',5,'Fred','fwoods4@google.cn','''78.229.170.124''','2012-09-30 16:38:29'),
-    ('blue',6,'Stephen','shanson5@livejournal.com','''182.227.157.105''','1995-11-07 21:40:50'),
-    ('blue',7,'William','wmartinez6@upenn.edu','''135.139.249.50''','1982-09-05 03:11:59'),
-    ('blue',8,'Jessica','jlong7@hao123.com','''203.62.178.210''','1991-10-16 11:03:15'),
-    ('blue',9,'Douglas','dwhite8@tamu.edu','''178.187.247.1''','1979-10-01 09:49:48'),
-    ('blue',10,'Lisa','lcoleman9@nydailynews.com','''168.234.128.249''','2011-05-26 07:45:49'),
-    ('blue',11,'Ralph','rfieldsa@home.pl','''55.152.163.149''','1972-11-18 19:06:11'),
-    ('blue',12,'Louise','lnicholsb@samsung.com','''141.116.153.154''','2014-11-25 20:56:14'),
-    ('blue',13,'Clarence','cduncanc@sfgate.com','''81.171.31.133''','2011-11-17 07:02:36'),
-    ('blue',14,'Daniel','dfranklind@omniture.com','''8.204.211.37''','1980-09-13 00:09:04'),
-    ('blue',15,'Katherine','klanee@auda.org.au','''176.96.134.59''','1997-08-22 19:36:56'),
-    ('blue',16,'Billy','bwardf@wikia.com','''214.108.78.85''','2003-10-19 02:14:47'),
-    ('blue',17,'Annie','agarzag@ocn.ne.jp','''190.108.42.70''','1988-10-28 15:12:35'),
-    ('blue',18,'Shirley','scolemanh@fastcompany.com','''109.251.164.84''','1988-08-24 10:50:57'),
-    ('blue',19,'Roger','rfrazieri@scribd.com','''38.145.218.108''','1985-12-31 15:17:15'),
-    ('blue',20,'Lillian','lstanleyj@goodreads.com','''47.57.236.17''','1970-06-08 02:09:05'),
-    ('blue',21,'Aaron','arodriguezk@nps.gov','''205.245.118.221''','1985-10-11 23:07:49'),
-    ('blue',22,'Patrick','pparkerl@techcrunch.com','''19.8.100.182''','2006-03-29 12:53:56'),
-    ('blue',23,'Phillip','pmorenom@intel.com','''41.38.254.103''','2011-11-07 15:35:43'),
-    ('blue',24,'Henry','hgarcian@newsvine.com','''1.191.216.252''','2008-08-28 08:30:44'),
-    ('blue',25,'Irene','iturnero@opera.com','''50.17.60.190''','1994-04-01 07:15:02'),
-    ('blue',26,'Andrew','adunnp@pen.io','''123.52.253.176''','2000-11-01 06:03:25'),
-    ('blue',27,'David','dgutierrezq@wp.com','''238.23.203.42''','1988-01-25 07:29:18'),
-    ('blue',28,'Henry','hsanchezr@cyberchimps.com','''248.102.2.185''','1983-01-01 13:36:37'),
-    ('blue',29,'Evelyn','epetersons@gizmodo.com','''32.80.46.119''','1979-07-16 17:24:12'),
-    ('blue',30,'Tammy','tmitchellt@purevolume.com','''249.246.167.88''','2001-04-03 10:00:23'),
-    ('blue',31,'Jacqueline','jlittleu@domainmarket.com','''127.181.97.47''','1986-02-11 21:35:50'),
-    ('blue',32,'Earl','eortizv@opera.com','''166.47.248.240''','1996-07-06 08:16:27'),
-    ('blue',33,'Juan','jgordonw@sciencedirect.com','''71.77.2.200''','1987-01-31 03:46:44'),
-    ('blue',34,'Diane','dhowellx@nyu.edu','''140.94.133.12''','1994-06-11 02:30:05'),
-    ('blue',35,'Randy','rkennedyy@microsoft.com','''73.255.34.196''','2005-05-26 20:28:39'),
-    ('blue',36,'Janice','jriveraz@time.com','''22.214.227.32''','1990-02-09 04:16:52'),
-    ('blue',37,'Laura','lperry10@diigo.com','''159.148.145.73''','2015-03-17 05:59:25'),
-    ('blue',38,'Gary','gray11@statcounter.com','''40.193.124.56''','1970-01-27 10:04:51'),
-    ('blue',39,'Jesse','jmcdonald12@typepad.com','''31.7.86.103''','2009-03-14 08:14:29'),
-    ('blue',40,'Sandra','sgonzalez13@goodreads.com','''223.80.168.239''','1993-05-21 14:08:54'),
-    ('blue',41,'Scott','smoore14@archive.org','''38.238.46.83''','1980-08-30 11:16:56'),
-    ('blue',42,'Phillip','pevans15@cisco.com','''158.234.59.34''','2011-12-15 23:26:31'),
-    ('blue',43,'Steven','sriley16@google.ca','''90.247.57.68''','2011-10-29 19:03:28'),
-    ('blue',44,'Deborah','dbrown17@hexun.com','''179.125.143.240''','1995-04-10 14:36:07'),
-    ('blue',45,'Lori','lross18@ow.ly','''64.80.162.180''','1980-12-27 16:49:15'),
-    ('blue',46,'Sean','sjackson19@tumblr.com','''240.116.183.69''','1988-06-12 21:24:45'),
-    ('blue',47,'Terry','tbarnes1a@163.com','''118.38.213.137''','1997-09-22 16:43:19'),
-    ('blue',48,'Dorothy','dross1b@ebay.com','''116.81.76.49''','2005-02-28 13:33:24'),
-    ('blue',49,'Samuel','swashington1c@house.gov','''38.191.253.40''','1989-01-19 21:15:48'),
-    ('blue',50,'Ralph','rcarter1d@tinyurl.com','''104.84.60.174''','2007-08-11 10:21:49'),
-    ('green',51,'Wayne','whudson1e@princeton.edu','''90.61.24.102''','1983-07-03 16:58:12'),
-    ('green',52,'Rose','rjames1f@plala.or.jp','''240.83.81.10''','1995-06-08 11:46:23'),
-    ('green',53,'Louise','lcox1g@theglobeandmail.com','''105.11.82.145''','2016-09-19 14:45:51'),
-    ('green',54,'Kenneth','kjohnson1h@independent.co.uk','''139.5.45.94''','1976-08-17 11:26:19'),
-    ('green',55,'Donna','dbrown1i@amazon.co.uk','''19.45.169.45''','2006-05-27 16:51:40'),
-    ('green',56,'Johnny','jvasquez1j@trellian.com','''118.202.238.23''','1975-11-17 08:42:32'),
-    ('green',57,'Patrick','pramirez1k@tamu.edu','''231.25.153.198''','1997-08-06 11:51:09'),
-    ('green',58,'Helen','hlarson1l@prweb.com','''8.40.21.39''','1993-08-04 19:53:40'),
-    ('green',59,'Patricia','pspencer1m@gmpg.org','''212.198.40.15''','1977-08-03 16:37:27'),
-    ('green',60,'Joseph','jspencer1n@marriott.com','''13.15.63.238''','2005-07-23 20:22:06'),
-    ('green',61,'Phillip','pschmidt1o@blogtalkradio.com','''177.98.201.190''','1976-05-19 21:47:44'),
-    ('green',62,'Joan','jwebb1p@google.ru','''105.229.170.71''','1972-09-07 17:53:47'),
-    ('green',63,'Phyllis','pkennedy1q@imgur.com','''35.145.8.244''','2000-01-01 22:33:37'),
-    ('green',64,'Katherine','khunter1r@smh.com.au','''248.168.205.32''','1991-01-09 06:40:24'),
-    ('green',65,'Laura','lvasquez1s@wiley.com','''128.129.115.152''','1997-10-23 12:04:56'),
-    ('green',66,'Juan','jdunn1t@state.gov','''44.228.124.51''','2004-11-10 05:07:35'),
-    ('green',67,'Judith','jholmes1u@wiley.com','''40.227.179.115''','1977-08-02 17:01:45'),
-    ('green',68,'Beverly','bbaker1v@wufoo.com','''208.34.84.59''','2016-03-06 20:07:23'),
-    ('green',69,'Lawrence','lcarr1w@flickr.com','''59.158.212.223''','1988-09-13 06:07:21'),
-    ('green',70,'Gloria','gwilliams1x@mtv.com','''245.231.88.33''','1995-03-18 22:32:46'),
-    ('green',71,'Steven','ssims1y@cbslocal.com','''104.50.58.255''','2001-08-05 21:26:20'),
-    ('green',72,'Betty','bmills1z@arstechnica.com','''103.177.214.220''','1981-12-14 21:26:54'),
-    ('green',73,'Mildred','mfuller20@prnewswire.com','''151.158.8.130''','2000-04-19 10:13:55'),
-    ('green',74,'Donald','dday21@icq.com','''9.178.102.255''','1972-12-03 00:58:24'),
-    ('green',75,'Eric','ethomas22@addtoany.com','''85.2.241.227''','1992-11-01 05:59:30'),
-    ('green',76,'Joyce','jarmstrong23@sitemeter.com','''169.224.20.36''','1985-10-24 06:50:01'),
-    ('green',77,'Maria','mmartinez24@amazonaws.com','''143.189.167.135''','2005-10-05 05:17:42'),
-    ('green',78,'Harry','hburton25@youtube.com','''156.47.176.237''','1978-03-26 05:53:33'),
-    ('green',79,'Kevin','klawrence26@hao123.com','''79.136.183.83''','1994-10-12 04:38:52'),
-    ('green',80,'David','dhall27@prweb.com','''133.149.172.153''','1976-12-15 16:24:24'),
-    ('green',81,'Kathy','kperry28@twitter.com','''229.242.72.228''','1979-03-04 02:58:56'),
-    ('green',82,'Adam','aprice29@elegantthemes.com','''13.145.21.10''','1982-11-07 11:46:59'),
-    ('green',83,'Brandon','bgriffin2a@va.gov','''73.249.128.212''','2013-10-30 05:30:36'),
-    ('green',84,'Henry','hnguyen2b@discovery.com','''211.36.214.242''','1985-01-09 06:37:27'),
-    ('green',85,'Eric','esanchez2c@edublogs.org','''191.166.188.251''','2004-05-01 23:21:42'),
-    ('green',86,'Jason','jlee2d@jimdo.com','''193.92.16.182''','1973-01-08 09:05:39'),
-    ('green',87,'Diana','drichards2e@istockphoto.com','''19.130.175.245''','1994-10-05 22:50:49'),
-    ('green',88,'Andrea','awelch2f@abc.net.au','''94.155.233.96''','2002-04-26 08:41:44'),
-    ('green',89,'Louis','lwagner2g@miitbeian.gov.cn','''26.217.34.111''','2003-08-25 07:56:39'),
-    ('green',90,'Jane','jsims2h@seesaa.net','''43.4.220.135''','1987-03-20 20:39:04'),
-    ('green',91,'Larry','lgrant2i@si.edu','''97.126.79.34''','2000-09-07 20:26:19'),
-    ('green',92,'Louis','ldean2j@prnewswire.com','''37.148.40.127''','2011-09-16 20:12:14'),
-    ('green',93,'Jennifer','jcampbell2k@xing.com','''38.106.254.142''','1988-07-15 05:06:49'),
-    ('green',94,'Wayne','wcunningham2l@google.com.hk','''223.28.26.187''','2009-12-15 06:16:54'),
-    ('green',95,'Lori','lstevens2m@icq.com','''181.250.181.58''','1984-10-28 03:29:19'),
-    ('green',96,'Judy','jsimpson2n@marriott.com','''180.121.239.219''','1986-02-07 15:18:10'),
-    ('green',97,'Phillip','phoward2o@usa.gov','''255.247.0.175''','2002-12-26 08:44:45'),
-    ('green',98,'Gloria','gwalker2p@usa.gov','''156.140.7.128''','1997-10-04 07:58:58'),
-    ('green',99,'Paul','pjohnson2q@umn.edu','''183.59.198.197''','1991-11-14 12:33:55'),
-    ('green',100,'Frank','fgreene2r@blogspot.com','''150.143.68.121''','2010-06-12 23:55:39');
diff --git a/tests/integration/sources_test/seeds/expected_multi_source.csv b/tests/integration/sources_test/seeds/expected_multi_source.csv
deleted file mode 100644
index de9c1c01d..000000000
--- a/tests/integration/sources_test/seeds/expected_multi_source.csv
+++ /dev/null
@@ -1,4 +0,0 @@
-id,first_name,color
-1,Larry,blue
-2,Curly,red
-3,Moe,green
diff --git a/tests/integration/sources_test/seeds/other_source_table.csv b/tests/integration/sources_test/seeds/other_source_table.csv
deleted file mode 100644
index a92b2cb8e..000000000
--- a/tests/integration/sources_test/seeds/other_source_table.csv
+++ /dev/null
@@ -1,4 +0,0 @@
-id,color
-1,blue
-2,red
-3,green
diff --git a/tests/integration/sources_test/seeds/other_table.csv b/tests/integration/sources_test/seeds/other_table.csv
deleted file mode 100644
index 56bdda92b..000000000
--- a/tests/integration/sources_test/seeds/other_table.csv
+++ /dev/null
@@ -1,4 +0,0 @@
-id,first_name
-1,Larry
-2,Curly
-3,Moe
diff --git a/tests/integration/sources_test/seeds/source.csv b/tests/integration/sources_test/seeds/source.csv
deleted file mode 100644
index a8f87412e..000000000
--- a/tests/integration/sources_test/seeds/source.csv
+++ /dev/null
@@ -1,101 +0,0 @@
-favorite_color,id,first_name,email,ip_address,updated_at
-blue,1,Larry,lking0@miitbeian.gov.cn,'69.135.206.194',2008-09-12 19:08:31
-blue,2,Larry,lperkins1@toplist.cz,'64.210.133.162',1978-05-09 04:15:14
-blue,3,Anna,amontgomery2@miitbeian.gov.cn,'168.104.64.114',2011-10-16 04:07:57
-blue,4,Sandra,sgeorge3@livejournal.com,'229.235.252.98',1973-07-19 10:52:43
-blue,5,Fred,fwoods4@google.cn,'78.229.170.124',2012-09-30 16:38:29
-blue,6,Stephen,shanson5@livejournal.com,'182.227.157.105',1995-11-07 21:40:50
-blue,7,William,wmartinez6@upenn.edu,'135.139.249.50',1982-09-05 03:11:59
-blue,8,Jessica,jlong7@hao123.com,'203.62.178.210',1991-10-16 11:03:15
-blue,9,Douglas,dwhite8@tamu.edu,'178.187.247.1',1979-10-01 09:49:48
-blue,10,Lisa,lcoleman9@nydailynews.com,'168.234.128.249',2011-05-26 07:45:49
-blue,11,Ralph,rfieldsa@home.pl,'55.152.163.149',1972-11-18 19:06:11
-blue,12,Louise,lnicholsb@samsung.com,'141.116.153.154',2014-11-25 20:56:14
-blue,13,Clarence,cduncanc@sfgate.com,'81.171.31.133',2011-11-17 07:02:36
-blue,14,Daniel,dfranklind@omniture.com,'8.204.211.37',1980-09-13 00:09:04
-blue,15,Katherine,klanee@auda.org.au,'176.96.134.59',1997-08-22 19:36:56
-blue,16,Billy,bwardf@wikia.com,'214.108.78.85',2003-10-19 02:14:47
-blue,17,Annie,agarzag@ocn.ne.jp,'190.108.42.70',1988-10-28 15:12:35
-blue,18,Shirley,scolemanh@fastcompany.com,'109.251.164.84',1988-08-24 10:50:57
-blue,19,Roger,rfrazieri@scribd.com,'38.145.218.108',1985-12-31 15:17:15
-blue,20,Lillian,lstanleyj@goodreads.com,'47.57.236.17',1970-06-08 02:09:05
-blue,21,Aaron,arodriguezk@nps.gov,'205.245.118.221',1985-10-11 23:07:49
-blue,22,Patrick,pparkerl@techcrunch.com,'19.8.100.182',2006-03-29 12:53:56
-blue,23,Phillip,pmorenom@intel.com,'41.38.254.103',2011-11-07 15:35:43
-blue,24,Henry,hgarcian@newsvine.com,'1.191.216.252',2008-08-28 08:30:44
-blue,25,Irene,iturnero@opera.com,'50.17.60.190',1994-04-01 07:15:02
-blue,26,Andrew,adunnp@pen.io,'123.52.253.176',2000-11-01 06:03:25
-blue,27,David,dgutierrezq@wp.com,'238.23.203.42',1988-01-25 07:29:18
-blue,28,Henry,hsanchezr@cyberchimps.com,'248.102.2.185',1983-01-01 13:36:37
-blue,29,Evelyn,epetersons@gizmodo.com,'32.80.46.119',1979-07-16 17:24:12
-blue,30,Tammy,tmitchellt@purevolume.com,'249.246.167.88',2001-04-03 10:00:23
-blue,31,Jacqueline,jlittleu@domainmarket.com,'127.181.97.47',1986-02-11 21:35:50
-blue,32,Earl,eortizv@opera.com,'166.47.248.240',1996-07-06 08:16:27
-blue,33,Juan,jgordonw@sciencedirect.com,'71.77.2.200',1987-01-31 03:46:44
-blue,34,Diane,dhowellx@nyu.edu,'140.94.133.12',1994-06-11 02:30:05
-blue,35,Randy,rkennedyy@microsoft.com,'73.255.34.196',2005-05-26 20:28:39
-blue,36,Janice,jriveraz@time.com,'22.214.227.32',1990-02-09 04:16:52
-blue,37,Laura,lperry10@diigo.com,'159.148.145.73',2015-03-17 05:59:25
-blue,38,Gary,gray11@statcounter.com,'40.193.124.56',1970-01-27 10:04:51
-blue,39,Jesse,jmcdonald12@typepad.com,'31.7.86.103',2009-03-14 08:14:29
-blue,40,Sandra,sgonzalez13@goodreads.com,'223.80.168.239',1993-05-21 14:08:54
-blue,41,Scott,smoore14@archive.org,'38.238.46.83',1980-08-30 11:16:56
-blue,42,Phillip,pevans15@cisco.com,'158.234.59.34',2011-12-15 23:26:31
-blue,43,Steven,sriley16@google.ca,'90.247.57.68',2011-10-29 19:03:28
-blue,44,Deborah,dbrown17@hexun.com,'179.125.143.240',1995-04-10 14:36:07
-blue,45,Lori,lross18@ow.ly,'64.80.162.180',1980-12-27 16:49:15
-blue,46,Sean,sjackson19@tumblr.com,'240.116.183.69',1988-06-12 21:24:45
-blue,47,Terry,tbarnes1a@163.com,'118.38.213.137',1997-09-22 16:43:19
-blue,48,Dorothy,dross1b@ebay.com,'116.81.76.49',2005-02-28 13:33:24
-blue,49,Samuel,swashington1c@house.gov,'38.191.253.40',1989-01-19 21:15:48
-blue,50,Ralph,rcarter1d@tinyurl.com,'104.84.60.174',2007-08-11 10:21:49
-green,51,Wayne,whudson1e@princeton.edu,'90.61.24.102',1983-07-03 16:58:12
-green,52,Rose,rjames1f@plala.or.jp,'240.83.81.10',1995-06-08 11:46:23
-green,53,Louise,lcox1g@theglobeandmail.com,'105.11.82.145',2016-09-19 14:45:51
-green,54,Kenneth,kjohnson1h@independent.co.uk,'139.5.45.94',1976-08-17 11:26:19
-green,55,Donna,dbrown1i@amazon.co.uk,'19.45.169.45',2006-05-27 16:51:40
-green,56,Johnny,jvasquez1j@trellian.com,'118.202.238.23',1975-11-17 08:42:32
-green,57,Patrick,pramirez1k@tamu.edu,'231.25.153.198',1997-08-06 11:51:09
-green,58,Helen,hlarson1l@prweb.com,'8.40.21.39',1993-08-04 19:53:40
-green,59,Patricia,pspencer1m@gmpg.org,'212.198.40.15',1977-08-03 16:37:27
-green,60,Joseph,jspencer1n@marriott.com,'13.15.63.238',2005-07-23 20:22:06
-green,61,Phillip,pschmidt1o@blogtalkradio.com,'177.98.201.190',1976-05-19 21:47:44
-green,62,Joan,jwebb1p@google.ru,'105.229.170.71',1972-09-07 17:53:47
-green,63,Phyllis,pkennedy1q@imgur.com,'35.145.8.244',2000-01-01 22:33:37
-green,64,Katherine,khunter1r@smh.com.au,'248.168.205.32',1991-01-09 06:40:24
-green,65,Laura,lvasquez1s@wiley.com,'128.129.115.152',1997-10-23 12:04:56
-green,66,Juan,jdunn1t@state.gov,'44.228.124.51',2004-11-10 05:07:35
-green,67,Judith,jholmes1u@wiley.com,'40.227.179.115',1977-08-02 17:01:45
-green,68,Beverly,bbaker1v@wufoo.com,'208.34.84.59',2016-03-06 20:07:23
-green,69,Lawrence,lcarr1w@flickr.com,'59.158.212.223',1988-09-13 06:07:21
-green,70,Gloria,gwilliams1x@mtv.com,'245.231.88.33',1995-03-18 22:32:46
-green,71,Steven,ssims1y@cbslocal.com,'104.50.58.255',2001-08-05 21:26:20
-green,72,Betty,bmills1z@arstechnica.com,'103.177.214.220',1981-12-14 21:26:54
-green,73,Mildred,mfuller20@prnewswire.com,'151.158.8.130',2000-04-19 10:13:55
-green,74,Donald,dday21@icq.com,'9.178.102.255',1972-12-03 00:58:24
-green,75,Eric,ethomas22@addtoany.com,'85.2.241.227',1992-11-01 05:59:30
-green,76,Joyce,jarmstrong23@sitemeter.com,'169.224.20.36',1985-10-24 06:50:01
-green,77,Maria,mmartinez24@amazonaws.com,'143.189.167.135',2005-10-05 05:17:42
-green,78,Harry,hburton25@youtube.com,'156.47.176.237',1978-03-26 05:53:33
-green,79,Kevin,klawrence26@hao123.com,'79.136.183.83',1994-10-12 04:38:52
-green,80,David,dhall27@prweb.com,'133.149.172.153',1976-12-15 16:24:24
-green,81,Kathy,kperry28@twitter.com,'229.242.72.228',1979-03-04 02:58:56
-green,82,Adam,aprice29@elegantthemes.com,'13.145.21.10',1982-11-07 11:46:59
-green,83,Brandon,bgriffin2a@va.gov,'73.249.128.212',2013-10-30 05:30:36
-green,84,Henry,hnguyen2b@discovery.com,'211.36.214.242',1985-01-09 06:37:27
-green,85,Eric,esanchez2c@edublogs.org,'191.166.188.251',2004-05-01 23:21:42
-green,86,Jason,jlee2d@jimdo.com,'193.92.16.182',1973-01-08 09:05:39
-green,87,Diana,drichards2e@istockphoto.com,'19.130.175.245',1994-10-05 22:50:49
-green,88,Andrea,awelch2f@abc.net.au,'94.155.233.96',2002-04-26 08:41:44
-green,89,Louis,lwagner2g@miitbeian.gov.cn,'26.217.34.111',2003-08-25 07:56:39
-green,90,Jane,jsims2h@seesaa.net,'43.4.220.135',1987-03-20 20:39:04
-green,91,Larry,lgrant2i@si.edu,'97.126.79.34',2000-09-07 20:26:19
-green,92,Louis,ldean2j@prnewswire.com,'37.148.40.127',2011-09-16 20:12:14
-green,93,Jennifer,jcampbell2k@xing.com,'38.106.254.142',1988-07-15 05:06:49
-green,94,Wayne,wcunningham2l@google.com.hk,'223.28.26.187',2009-12-15 06:16:54
-green,95,Lori,lstevens2m@icq.com,'181.250.181.58',1984-10-28 03:29:19
-green,96,Judy,jsimpson2n@marriott.com,'180.121.239.219',1986-02-07 15:18:10
-green,97,Phillip,phoward2o@usa.gov,'255.247.0.175',2002-12-26 08:44:45
-green,98,Gloria,gwalker2p@usa.gov,'156.140.7.128',1997-10-04 07:58:58
-green,99,Paul,pjohnson2q@umn.edu,'183.59.198.197',1991-11-14 12:33:55
-green,100,Frank,fgreene2r@blogspot.com,'150.143.68.121',2010-06-12 23:55:39
diff --git a/tests/integration/sources_test/test_sources.py b/tests/integration/sources_test/test_sources.py
deleted file mode 100644
index 83a586434..000000000
--- a/tests/integration/sources_test/test_sources.py
+++ /dev/null
@@ -1,183 +0,0 @@
-import json
-import os
-from datetime import datetime, timedelta
-
-import yaml
-
-import dbt.tracking
-import dbt.version
-from dbt.events.functions import reset_metadata_vars
-from tests.integration.base import DBTIntegrationTest, use_profile, AnyFloat, \
-    AnyStringWith
-
-
-class BaseSourcesTest(DBTIntegrationTest):
-    @property
-    def schema(self):
-        return "sources"
-
-    @property
-    def models(self):
-        return "models"
-
-    @property
-    def project_config(self):
-        return {
-            'config-version': 2,
-            'seed-paths': ['seeds'],
-            'quoting': {'database': True, 'schema': True, 'identifier': True},
-            'seeds': {
-                'quote_columns': True,
-            },
-        }
-
-    def setUp(self):
-        super().setUp()
-        os.environ['DBT_TEST_SCHEMA_NAME_VARIABLE'] = 'test_run_schema'
-
-    def tearDown(self):
-        del os.environ['DBT_TEST_SCHEMA_NAME_VARIABLE']
-        super().tearDown()
-
-    def run_dbt_with_vars(self, cmd, *args, **kwargs):
-        vars_dict = {
-            'test_run_schema': self.unique_schema(),
-            'test_loaded_at': self.adapter.quote('updated_at'),
-        }
-        cmd.extend(['--vars', yaml.safe_dump(vars_dict)])
-        return self.run_dbt(cmd, *args, **kwargs)
-
-
-class TestSourceFreshness(BaseSourcesTest):
-    def setUp(self):
-        super().setUp()
-        self.run_dbt_with_vars(['seed'])
-        self.maxDiff = None
-        self._id = 101
-        # this is the db initial value
-        self.last_inserted_time = "2016-09-19T14:45:51+00:00"
-        os.environ['DBT_ENV_CUSTOM_ENV_key'] = 'value'
-
-    def tearDown(self):
-        super().tearDown()
-        reset_metadata_vars()
-        del os.environ['DBT_ENV_CUSTOM_ENV_key']
-
-    def _set_updated_at_to(self, delta):
-        insert_time = datetime.utcnow() + delta
-        timestr = insert_time.strftime("%Y-%m-%d %H:%M:%S")
-        # favorite_color,id,first_name,email,ip_address,updated_at
-        insert_id = self._id
-        self._id += 1
-        raw_sql = """INSERT INTO {schema}.{source}
-            ({quoted_columns})
-        VALUES (
-            'blue',{id},'Jake','abc@example.com','192.168.1.1','{time}'
-        )"""
-        quoted_columns = ','.join(
-            c for c in
-            ('favorite_color', 'id', 'first_name',
-             'email', 'ip_address', 'updated_at')
-        )
-        self.run_sql(
-            raw_sql,
-            kwargs={
-                'schema': self.unique_schema(),
-                'time': timestr,
-                'id': insert_id,
-                'source': self.adapter.quote('source'),
-                'quoted_columns': quoted_columns,
-            }
-        )
-        self.last_inserted_time = insert_time.strftime(
-            "%Y-%m-%dT%H:%M:%S+00:00")
-
-    def _assert_freshness_results(self, path, state):
-        self.assertTrue(os.path.exists(path))
-        with open(path) as fp:
-            data = json.load(fp)
-
-        assert set(data) == {'metadata', 'results', 'elapsed_time'}
-        assert 'generated_at' in data['metadata']
-        assert isinstance(data['elapsed_time'], float)
-        self.assertBetween(data['metadata']['generated_at'],
-                           self.freshness_start_time)
-        assert data['metadata']['dbt_schema_version'] == 'https://schemas.getdbt.com/dbt/sources/v3.json'
-        assert data['metadata']['dbt_version'] == dbt.version.__version__
-        assert data['metadata']['invocation_id'] == dbt.tracking.active_user.invocation_id
-        key = 'key'
-        if os.name == 'nt':
-            key = key.upper()
-        assert data['metadata']['env'] == {key: 'value'}
-
-        last_inserted_time = self.last_inserted_time
-
-        self.assertEqual(len(data['results']), 1)
-
-        self.assertEqual(data['results'], [
-            {
-                'unique_id': 'source.test.test_source.test_table',
-                'max_loaded_at': last_inserted_time,
-                'snapshotted_at': AnyStringWith(),
-                'max_loaded_at_time_ago_in_s': AnyFloat(),
-                'status': state,
-                'criteria': {
-                    'filter': None,
-                    'warn_after': {'count': 10, 'period': 'hour'},
-                    'error_after': {'count': 18, 'period': 'hour'},
-                },
-                'adapter_response': {},
-                'thread_id': AnyStringWith('Thread-'),
-                'execution_time': AnyFloat(),
-                'timing': [
-                    {
-                        'name': 'compile',
-                        'started_at': AnyStringWith(),
-                        'completed_at': AnyStringWith(),
-                    },
-                    {
-                        'name': 'execute',
-                        'started_at': AnyStringWith(),
-                        'completed_at': AnyStringWith(),
-                    }
-                ]
-            }
-        ])
-
-    def _run_source_freshness(self):
-        # test_source.test_table should have a loaded_at field of `updated_at`
-        # and a freshness of warn_after: 10 hours, error_after: 18 hours
-        # by default, our data set is way out of date!
-        self.freshness_start_time = datetime.utcnow()
-        reset_metadata_vars()
-        results = self.run_dbt_with_vars(
-            ['source', 'freshness', '-o', 'target/error_source.json'],
-            expect_pass=False
-        )
-        self.assertEqual(len(results), 1)
-        self.assertEqual(results[0].status, 'error')
-        self._assert_freshness_results('target/error_source.json', 'error')
-
-        self._set_updated_at_to(timedelta(hours=-12))
-        self.freshness_start_time = datetime.utcnow()
-        reset_metadata_vars()
-        results = self.run_dbt_with_vars(
-            ['source', 'freshness', '-o', 'target/warn_source.json'],
-        )
-        self.assertEqual(len(results), 1)
-        self.assertEqual(results[0].status, 'warn')
-        self._assert_freshness_results('target/warn_source.json', 'warn')
-
-        self._set_updated_at_to(timedelta(hours=-2))
-        self.freshness_start_time = datetime.utcnow()
-        reset_metadata_vars()
-        results = self.run_dbt_with_vars(
-            ['source', 'freshness', '-o', 'target/pass_source.json'],
-        )
-        self.assertEqual(len(results), 1)
-        self.assertEqual(results[0].status, 'pass')
-        self._assert_freshness_results('target/pass_source.json', 'pass')
-
-    @use_profile('bigquery')
-    def test_bigquery_source_freshness(self):
-        self._run_source_freshness()

From 368742d8bb91b6a8a1d57470945a9d8bcabdacf8 Mon Sep 17 00:00:00 2001
From: Matthew McKnight <91097623+McKnight-42@users.noreply.github.com>
Date: Thu, 26 Jan 2023 15:58:27 -0600
Subject: [PATCH 10/16] [CT-1630] Convert column_types test for dbt-bigquery
 (#476)

* init conversion of column_types test for bigquery, removal of old test, dev_requirements pin changed
* move bigquery specific stuff to fixture file to minimize contents of tests
* revert back to having test specific fixtures part of test file as they are not overly large additions
* reset dev-requirement pointer
* move all fixtures to one single file
---
 .../adapter/column_types/fixtures.py          | 49 +++++++++++++
 .../column_types/test_alter_column_types.py   | 48 +++++++++++++
 .../adapter/column_types/test_column_types.py | 15 ++++
 .../column_type_tests/bq_models/model.sql     |  5 --
 .../column_type_tests/bq_models/schema.yml    | 10 ---
 .../bq_models_alter_type/altered_schema.yml   | 10 ---
 .../bq_models_alter_type/model.sql            |  6 --
 .../macros/test_alter_column_type.sql         |  5 --
 .../column_type_tests/macros/test_is_type.sql | 72 -------------------
 .../test_alter_column_types.py                | 28 --------
 .../column_type_tests/test_column_types.py    | 21 ------
 11 files changed, 112 insertions(+), 157 deletions(-)
 create mode 100644 tests/functional/adapter/column_types/fixtures.py
 create mode 100644 tests/functional/adapter/column_types/test_alter_column_types.py
 create mode 100644 tests/functional/adapter/column_types/test_column_types.py
 delete mode 100644 tests/integration/column_type_tests/bq_models/model.sql
 delete mode 100644 tests/integration/column_type_tests/bq_models/schema.yml
 delete mode 100644 tests/integration/column_type_tests/bq_models_alter_type/altered_schema.yml
 delete mode 100644 tests/integration/column_type_tests/bq_models_alter_type/model.sql
 delete mode 100644 tests/integration/column_type_tests/macros/test_alter_column_type.sql
 delete mode 100644 tests/integration/column_type_tests/macros/test_is_type.sql
 delete mode 100644 tests/integration/column_type_tests/test_alter_column_types.py
 delete mode 100644 tests/integration/column_type_tests/test_column_types.py

diff --git a/tests/functional/adapter/column_types/fixtures.py b/tests/functional/adapter/column_types/fixtures.py
new file mode 100644
index 000000000..b7be1e646
--- /dev/null
+++ b/tests/functional/adapter/column_types/fixtures.py
@@ -0,0 +1,49 @@
+_MACRO_TEST_ALTER_COLUMN_TYPE = """
+{% macro test_alter_column_type(model_name, column_name, new_column_type) %}
+    {% set relation = ref(model_name) %}
+    {{ alter_column_type(relation, column_name, new_column_type) }}
+{% endmacro %}
+"""
+
+_MODEL_SQL = """
+select
+    CAST(1 as int64) as int64_col,
+    CAST(2.0 as float64) as float64_col,
+    CAST(3.0 as numeric) as numeric_col,
+    CAST('3' as string) as string_col,
+"""
+
+_MODEL_ALT_SQL = """
+{{ config(materialized='table') }}
+select
+    CAST(1 as int64) as int64_col,
+    CAST(2.0 as float64) as float64_col,
+    CAST(3.0 as numeric) as numeric_col,
+    CAST('3' as string) as string_col,
+"""
+
+_SCHEMA_YML = """
+version: 2
+models:
+  - name: model
+    tests:
+      - is_type:
+          column_map:
+            int64_col: ['integer', 'number']
+            float64_col: ['float', 'number']
+            numeric_col: ['numeric', 'number']
+            string_col: ['string', 'not number']
+"""
+
+_ALT_SCHEMA_YML = """
+version: 2
+models:
+  - name: model
+    tests:
+      - is_type:
+          column_map:
+            int64_col: ['string', 'not number']
+            float64_col: ['float', 'number']
+            numeric_col: ['numeric', 'number']
+            string_col: ['string', 'not number']
+"""
diff --git a/tests/functional/adapter/column_types/test_alter_column_types.py b/tests/functional/adapter/column_types/test_alter_column_types.py
new file mode 100644
index 000000000..9bfb88dc1
--- /dev/null
+++ b/tests/functional/adapter/column_types/test_alter_column_types.py
@@ -0,0 +1,48 @@
+import pytest
+import yaml
+from dbt.tests.util import run_dbt
+from dbt.tests.adapter.column_types.test_column_types import BaseColumnTypes
+from dbt.tests.adapter.column_types.fixtures import macro_test_is_type_sql
+from tests.functional.adapter.column_types.fixtures import (
+    _MACRO_TEST_ALTER_COLUMN_TYPE,
+    _MODEL_ALT_SQL,
+    _ALT_SCHEMA_YML
+)
+
+
+
+class BaseAlterColumnTypes(BaseColumnTypes):
+
+    @pytest.fixture(scope="class")
+    def macros(self):
+        return {
+            "test_is_type.sql": macro_test_is_type_sql,
+            "test_alter_column_type.sql": _MACRO_TEST_ALTER_COLUMN_TYPE
+        }
+
+    def run_and_alter_and_test(self, alter_column_type_args):
+        results = run_dbt(["run"])
+        assert len(results) == 1
+        run_dbt(['run-operation', 'test_alter_column_type', '--args', alter_column_type_args])
+        results = run_dbt(["test"])
+        assert len(results) == 1
+
+
+
+class TestBigQueryAlterColumnTypes(BaseAlterColumnTypes):
+
+    @pytest.fixture(scope="class")
+    def models(self):
+        return {
+            "model.sql": _MODEL_ALT_SQL,
+            "schema.yml": _ALT_SCHEMA_YML
+        }
+
+    def test_bigquery_alter_column_types(self, project):
+        alter_column_type_args = yaml.safe_dump({
+            'model_name': 'model',
+            'column_name': 'int64_col',
+            'new_column_type': 'string'
+        })
+
+        self.run_and_alter_and_test(alter_column_type_args)
\ No newline at end of file
diff --git a/tests/functional/adapter/column_types/test_column_types.py b/tests/functional/adapter/column_types/test_column_types.py
new file mode 100644
index 000000000..8f7ee4351
--- /dev/null
+++ b/tests/functional/adapter/column_types/test_column_types.py
@@ -0,0 +1,15 @@
+import pytest
+from dbt.tests.adapter.column_types.test_column_types import BaseColumnTypes
+from tests.functional.adapter.column_types.fixtures import _MODEL_SQL, _SCHEMA_YML
+
+class TestBigQueryColumnTypes(BaseColumnTypes):
+
+    @pytest.fixture(scope="class")
+    def models(self):
+        return {
+            "model.sql": _MODEL_SQL,
+            "schema.yml": _SCHEMA_YML
+        }
+
+    def test_run_and_test(self, project):
+        self.run_and_test()
\ No newline at end of file
diff --git a/tests/integration/column_type_tests/bq_models/model.sql b/tests/integration/column_type_tests/bq_models/model.sql
deleted file mode 100644
index 94e4fba18..000000000
--- a/tests/integration/column_type_tests/bq_models/model.sql
+++ /dev/null
@@ -1,5 +0,0 @@
-select
-    CAST(1 as int64) as int64_col,
-    CAST(2.0 as float64) as float64_col,
-    CAST(3.0 as numeric) as numeric_col,
-    CAST('3' as string) as string_col,
diff --git a/tests/integration/column_type_tests/bq_models/schema.yml b/tests/integration/column_type_tests/bq_models/schema.yml
deleted file mode 100644
index 8eb8a9ae2..000000000
--- a/tests/integration/column_type_tests/bq_models/schema.yml
+++ /dev/null
@@ -1,10 +0,0 @@
-version: 2
-models:
-  - name: model
-    tests:
-      - is_type:
-          column_map:
-            int64_col: ['integer', 'number']
-            float64_col: ['float', 'number']
-            numeric_col: ['numeric', 'number']
-            string_col: ['string', 'not number']
diff --git a/tests/integration/column_type_tests/bq_models_alter_type/altered_schema.yml b/tests/integration/column_type_tests/bq_models_alter_type/altered_schema.yml
deleted file mode 100644
index 3a78da404..000000000
--- a/tests/integration/column_type_tests/bq_models_alter_type/altered_schema.yml
+++ /dev/null
@@ -1,10 +0,0 @@
-version: 2
-models:
-  - name: model
-    tests:
-      - is_type:
-          column_map:
-            int64_col: ['string', 'not number']
-            float64_col: ['float', 'number']
-            numeric_col: ['numeric', 'number']
-            string_col: ['string', 'not number']
diff --git a/tests/integration/column_type_tests/bq_models_alter_type/model.sql b/tests/integration/column_type_tests/bq_models_alter_type/model.sql
deleted file mode 100644
index 066252ae5..000000000
--- a/tests/integration/column_type_tests/bq_models_alter_type/model.sql
+++ /dev/null
@@ -1,6 +0,0 @@
-{{ config(materialized='table') }}
-select
-    CAST(1 as int64) as int64_col,
-    CAST(2.0 as float64) as float64_col,
-    CAST(3.0 as numeric) as numeric_col,
-    CAST('3' as string) as string_col,
diff --git a/tests/integration/column_type_tests/macros/test_alter_column_type.sql b/tests/integration/column_type_tests/macros/test_alter_column_type.sql
deleted file mode 100644
index 133d59fad..000000000
--- a/tests/integration/column_type_tests/macros/test_alter_column_type.sql
+++ /dev/null
@@ -1,5 +0,0 @@
--- Macro to alter a column type
-{% macro test_alter_column_type(model_name, column_name, new_column_type) %}
-    {% set relation = ref(model_name) %}
-    {{ alter_column_type(relation, column_name, new_column_type) }}
-{% endmacro %}
diff --git a/tests/integration/column_type_tests/macros/test_is_type.sql b/tests/integration/column_type_tests/macros/test_is_type.sql
deleted file mode 100644
index 2f1ffde2b..000000000
--- a/tests/integration/column_type_tests/macros/test_is_type.sql
+++ /dev/null
@@ -1,72 +0,0 @@
-
-{% macro simple_type_check_column(column, check) %}
-    {% if check == 'string' %}
-        {{ return(column.is_string()) }}
-    {% elif check == 'float' %}
-        {{ return(column.is_float()) }}
-    {% elif check == 'number' %}
-        {{ return(column.is_number()) }}
-    {% elif check == 'numeric' %}
-        {{ return(column.is_numeric()) }}
-    {% elif check == 'integer' %}
-        {{ return(column.is_integer()) }}
-    {% else %}
-        {% do exceptions.raise_compiler_error('invalid type check value: ' ~ check) %}
-    {% endif %}
-{% endmacro %}
-
-{% macro type_check_column(column, type_checks) %}
-    {% set failures = [] %}
-    {% for type_check in type_checks %}
-        {% if type_check.startswith('not ') %}
-            {% if simple_type_check_column(column, type_check[4:]) %}
-                {% do
log('simple_type_check_column got ', True) %} - {% do failures.append(type_check) %} - {% endif %} - {% else %} - {% if not simple_type_check_column(column, type_check) %} - {% do failures.append(type_check) %} - {% endif %} - {% endif %} - {% endfor %} - {% if (failures | length) > 0 %} - {% do log('column ' ~ column.name ~ ' had failures: ' ~ failures, info=True) %} - {% endif %} - {% do return((failures | length) == 0) %} -{% endmacro %} - -{% test is_type(model, column_map) %} - {% if not execute %} - {{ return(None) }} - {% endif %} - {% if not column_map %} - {% do exceptions.raise_compiler_error('test_is_type must have a column name') %} - {% endif %} - {% set columns = adapter.get_columns_in_relation(model) %} - {% if (column_map | length) != (columns | length) %} - {% set column_map_keys = (column_map | list | string) %} - {% set column_names = (columns | map(attribute='name') | list | string) %} - {% do exceptions.raise_compiler_error('did not get all the columns/all columns not specified:\n' ~ column_map_keys ~ '\nvs\n' ~ column_names) %} - {% endif %} - {% set bad_columns = [] %} - {% for column in columns %} - {% set column_key = (column.name | lower) %} - {% if column_key in column_map %} - {% set type_checks = column_map[column_key] %} - {% if not type_checks %} - {% do exceptions.raise_compiler_error('no type checks?') %} - {% endif %} - {% if not type_check_column(column, type_checks) %} - {% do bad_columns.append(column.name) %} - {% endif %} - {% else %} - {% do exceptions.raise_compiler_error('column key ' ~ column_key ~ ' not found in ' ~ (column_map | list | string)) %} - {% endif %} - {% endfor %} - {% do log('bad columns: ' ~ bad_columns, info=True) %} - {% for bad_column in bad_columns %} - select '{{ bad_column }}' as bad_column - {{ 'union all' if not loop.last }} - {% endfor %} - select * from (select 1 limit 0) as nothing -{% endtest %} diff --git a/tests/integration/column_type_tests/test_alter_column_types.py b/tests/integration/column_type_tests/test_alter_column_types.py deleted file mode 100644 index 60bda6df1..000000000 --- a/tests/integration/column_type_tests/test_alter_column_types.py +++ /dev/null @@ -1,28 +0,0 @@ -from tests.integration.base import DBTIntegrationTest, use_profile -import yaml - - -class TestAlterColumnTypes(DBTIntegrationTest): - @property - def schema(self): - return 'alter_column_types' - - def run_and_alter_and_test(self, alter_column_type_args): - self.assertEqual(len(self.run_dbt(['run'])), 1) - self.run_dbt(['run-operation', 'test_alter_column_type', '--args', alter_column_type_args]) - self.assertEqual(len(self.run_dbt(['test'])), 1) - - -class TestBigQueryAlterColumnTypes(TestAlterColumnTypes): - @property - def models(self): - return 'bq_models_alter_type' - - @use_profile('bigquery') - def test_bigquery_column_types(self): - alter_column_type_args = yaml.safe_dump({ - 'model_name': 'model', - 'column_name': 'int64_col', - 'new_column_type': 'string' - }) - self.run_and_alter_and_test(alter_column_type_args) diff --git a/tests/integration/column_type_tests/test_column_types.py b/tests/integration/column_type_tests/test_column_types.py deleted file mode 100644 index ab04cc8f3..000000000 --- a/tests/integration/column_type_tests/test_column_types.py +++ /dev/null @@ -1,21 +0,0 @@ -from tests.integration.base import DBTIntegrationTest, use_profile - - -class TestColumnTypes(DBTIntegrationTest): - @property - def schema(self): - return 'column_types' - - def run_and_test(self): - self.assertEqual(len(self.run_dbt(['run'])), 1) - 
self.assertEqual(len(self.run_dbt(['test'])), 1) - - -class TestBigQueryColumnTypes(TestColumnTypes): - @property - def models(self): - return 'bq_models' - - @use_profile('bigquery') - def test_bigquery_column_types(self): - self.run_and_test() From 33b29dea13e00aa954a2ff771a42de0876a19216 Mon Sep 17 00:00:00 2001 From: Alexander Smolyakov Date: Fri, 27 Jan 2023 19:05:23 +0400 Subject: [PATCH 11/16] [CI/CD] Update release workflow and introduce workflow for nightly releases (#466) * Add workflows * Set default `test_run` value to `true` * Update .bumpversion.cfg * Resolve review comments - Update workflow docs - Change workflow name - Set `test_run` default value to `true` * Update Slack secret * Resolve review comments --- .bumpversion.cfg | 8 +- .github/workflows/nightly-release.yml | 109 +++++++++ .github/workflows/release.yml | 309 ++++++++++++++------------ scripts/env-setup.sh | 13 ++ 4 files changed, 291 insertions(+), 148 deletions(-) create mode 100644 .github/workflows/nightly-release.yml create mode 100644 scripts/env-setup.sh diff --git a/.bumpversion.cfg b/.bumpversion.cfg index ba1f95c9a..b018b0474 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -3,8 +3,12 @@ current_version = 1.5.0a1 parse = (?P<major>\d+) \.(?P<minor>\d+) \.(?P<patch>\d+) - ((?P<prerelease>a|b|rc)(?P<num>\d+))? + ((?P<prerelease>a|b|rc) + (?P<num>\d+) # pre-release version num + )(\.(?P<nightly>[a-z..0-9]+) + )? serialize = + {major}.{minor}.{patch}{prerelease}{num}.{nightly} {major}.{minor}.{patch}{prerelease}{num} {major}.{minor}.{patch} commit = False @@ -22,6 +26,8 @@ values = [bumpversion:part:num] first_value = 1 +[bumpversion:part:nightly] + [bumpversion:file:setup.py] [bumpversion:file:dbt/adapters/bigquery/__version__.py] diff --git a/.github/workflows/nightly-release.yml b/.github/workflows/nightly-release.yml new file mode 100644 index 000000000..b668d62ec --- /dev/null +++ b/.github/workflows/nightly-release.yml @@ -0,0 +1,109 @@ +# **what?** +# Nightly releases to GitHub and PyPI. This workflow produces the following outcomes: +# - generate and validate data for the nightly release (commit SHA, version number, release branch); +# - pass data to the release workflow; +# - the nightly release will be pushed to GitHub as a draft release; +# - the nightly build will be pushed to test PyPI; +# +# **why?** +# Ensure an automated and tested release process for nightly builds +# +# **when?** +# This workflow runs on schedule or can be run manually on demand.
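Reviewer note: the version arithmetic in the jobs that follow is simple — read `current_version` from `.bumpversion.cfg`, parse it, and append a `.dev<MMDDYYYY>+nightly` segment, which is exactly the segment the new `[bumpversion:part:nightly]` part and serialize line above make legal. A small Python sketch of the same composition (the function name is illustrative; the `%m%d%Y` format matches the "Get Current Date" step below):

```python
from datetime import datetime
from typing import Optional


def nightly_version_number(current_version: str, now: Optional[datetime] = None) -> str:
    # Mirrors the "Generate Nightly Release Version Number" step:
    # <current version>.dev<MMDDYYYY>+nightly
    stamp = (now or datetime.utcnow()).strftime("%m%d%Y")
    return f"{current_version}.dev{stamp}+nightly"


print(nightly_version_number("1.5.0a1", datetime(2023, 1, 27)))  # 1.5.0a1.dev01272023+nightly
```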
+ +name: Nightly Test Release to GitHub and PyPI + +on: + workflow_dispatch: # for manual triggering + schedule: + - cron: 0 9 * * * + +permissions: + contents: write # this is the permission that allows creating a new release + +defaults: + run: + shell: bash + +env: + RELEASE_BRANCH: "main" + +jobs: + aggregate-release-data: + runs-on: ubuntu-latest + + outputs: + commit_sha: ${{ steps.resolve-commit-sha.outputs.release_commit }} + version_number: ${{ steps.nightly-release-version.outputs.number }} + release_branch: ${{ steps.release-branch.outputs.name }} + + steps: + - name: "Checkout ${{ github.repository }} Branch ${{ env.RELEASE_BRANCH }}" + uses: actions/checkout@v3 + with: + ref: ${{ env.RELEASE_BRANCH }} + + - name: "Resolve Commit To Release" + id: resolve-commit-sha + run: | + commit_sha=$(git rev-parse HEAD) + echo "release_commit=$commit_sha" >> $GITHUB_OUTPUT + + - name: "Get Current Version Number" + id: version-number-sources + run: | + current_version=`awk -F"current_version = " '{print $2}' .bumpversion.cfg | tr '\n' ' '` + echo "current_version=$current_version" >> $GITHUB_OUTPUT + + - name: "Audit Version And Parse Into Parts" + id: semver + uses: dbt-labs/actions/parse-semver@v1.1.0 + with: + version: ${{ steps.version-number-sources.outputs.current_version }} + + - name: "Get Current Date" + id: current-date + run: echo "date=$(date +'%m%d%Y')" >> $GITHUB_OUTPUT + + - name: "Generate Nightly Release Version Number" + id: nightly-release-version + run: | + number="${{ steps.semver.outputs.version }}.dev${{ steps.current-date.outputs.date }}+nightly" + echo "number=$number" >> $GITHUB_OUTPUT + + - name: "Audit Nightly Release Version And Parse Into Parts" + uses: dbt-labs/actions/parse-semver@v1.1.0 + with: + version: ${{ steps.nightly-release-version.outputs.number }} + + - name: "Set Release Branch" + id: release-branch + run: | + echo "name=${{ env.RELEASE_BRANCH }}" >> $GITHUB_OUTPUT + + log-outputs-aggregate-release-data: + runs-on: ubuntu-latest + needs: [aggregate-release-data] + + steps: + - name: "[DEBUG] Log Outputs" + run: | + echo commit_sha : ${{ needs.aggregate-release-data.outputs.commit_sha }} + echo version_number: ${{ needs.aggregate-release-data.outputs.version_number }} + echo release_branch: ${{ needs.aggregate-release-data.outputs.release_branch }} + + release-github-pypi: + needs: [aggregate-release-data] + + uses: ./.github/workflows/release.yml + with: + sha: ${{ needs.aggregate-release-data.outputs.commit_sha }} + target_branch: ${{ needs.aggregate-release-data.outputs.release_branch }} + version_number: ${{ needs.aggregate-release-data.outputs.version_number }} + build_script_path: "scripts/build-dist.sh" + env_setup_script_path: "scripts/env-setup.sh" + s3_bucket_name: "core-team-artifacts" + package_test_command: "dbt --version" + test_run: true + nightly_release: true + secrets: inherit diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 5fd0291e9..a9f60734b 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -1,14 +1,19 @@ # **what?** -# Take the given commit, run unit tests specifically on that sha, build and -# package it, and then release to GitHub with that specific build (PyPi to follow later) - +# Release workflow provides the following steps: +# - checkout the given commit; +# - validate version in sources and changelog file for given version; +# - bump the version and generate a changelog if needed; +# - merge all changes to the target branch if needed; +# - run unit and
integration tests against given commit; +# - build and package that SHA; +# - release it to GitHub and PyPI with that specific build; +# # **why?** # Ensure an automated and tested release process - +# # **when?** -# This will only run manually with a given sha and version - -name: Build, Test, and Package +# This workflow can be run manually on demand or can be called by other workflows +name: Release to GitHub and PyPI on: workflow_dispatch: @@ -17,17 +22,85 @@ on: description: "The last commit sha in the release" type: string required: true - changelog_path: - description: "Path to changes log" + target_branch: + description: "The branch to release from" + type: string + required: true + version_number: + description: "The release version number (i.e. 1.0.0b1)" + type: string + required: true + build_script_path: + description: "Build script path" + type: string + default: "scripts/build-dist.sh" + required: true + env_setup_script_path: + description: "Environment setup script path" type: string - default: "./CHANGELOG.md" + default: "scripts/env-setup.sh" + required: false + s3_bucket_name: + description: "AWS S3 bucket name" + type: string + default: "core-team-artifacts" + required: true + package_test_command: + description: "Package test command" + type: string + default: "dbt --version" + required: true + test_run: + description: "Test run (Publish release as draft)" + type: boolean + default: true + required: false + nightly_release: + description: "Nightly release to dev environment" + type: boolean + default: false required: false + workflow_call: + inputs: + sha: + description: "The last commit sha in the release" + type: string + required: true + target_branch: + description: "The branch to release from" + type: string + required: true version_number: description: "The release version number (i.e. 
1.0.0b1)" type: string required: true + build_script_path: + description: "Build script path" + type: string + default: "scripts/build-dist.sh" + required: true + env_setup_script_path: + description: "Environment setup script path" + type: string + default: "scripts/env-setup.sh" + required: false + s3_bucket_name: + description: "AWS S3 bucket name" + type: string + default: "core-team-artifacts" + required: true + package_test_command: + description: "Package test command" + type: string + default: "dbt --version" + required: true test_run: - description: "Test run (Publish release as draft to GitHub)" + description: "Test run (Publish release as draft)" + type: boolean + default: true + required: false + nightly_release: + description: "Nightly release to dev environment" type: boolean default: false required: false @@ -35,10 +108,6 @@ on: permissions: contents: write # this is the permission that allows creating a new release -env: - PYTHON_TARGET_VERSION: 3.8 - ARTIFACT_RETENTION_DAYS: 2 - defaults: run: shell: bash @@ -50,164 +119,110 @@ jobs: steps: - name: "[DEBUG] Print Variables" run: | - echo The last commit sha in the release: ${{ inputs.sha }} - echo The release version number: ${{ inputs.version_number }} - echo The path to the changelog markdpown: ${{ inputs.changelog_path }} - echo This is a test run: ${{ inputs.test_run }} - echo Python target version: ${{ env.PYTHON_TARGET_VERSION }} - echo Artifact retention days: ${{ env.ARTIFACT_RETENTION_DAYS }} - - unit: - name: Unit Test - runs-on: ubuntu-latest + echo The last commit sha in the release: ${{ inputs.sha }} + echo The branch to release from: ${{ inputs.target_branch }} + echo The release version number: ${{ inputs.version_number }} + echo Build script path: ${{ inputs.build_script_path }} + echo Environment setup script path: ${{ inputs.env_setup_script_path }} + echo AWS S3 bucket name: ${{ inputs.s3_bucket_name }} + echo Package test command: ${{ inputs.package_test_command }} + echo Test run: ${{ inputs.test_run }} + echo Nightly release: ${{ inputs.nightly_release }} - env: - TOXENV: "unit" + bump-version-generate-changelog: + name: Bump package version, Generate changelog - steps: - - name: "Checkout Commit - ${{ inputs.sha }}" - uses: actions/checkout@v3 - with: - persist-credentials: false - ref: ${{ github.event.inputs.sha }} - - - name: "Set up Python - ${{ env.PYTHON_TARGET_VERSION }}" - uses: actions/setup-python@v4 - with: - python-version: ${{ env.PYTHON_TARGET_VERSION }} - - - name: "Install Python Dependencies" - run: | - python -m pip install --user --upgrade pip - python -m pip install tox - python -m pip --version - python -m tox --version + uses: dbt-labs/dbt-release/.github/workflows/release-prep.yml@main - - name: "Run Tox" - run: tox - - build: - name: Build Packages - - runs-on: ubuntu-latest - - steps: - - name: "Checkout Commit - ${{ inputs.sha }}" - uses: actions/checkout@v3 - with: - persist-credentials: false - ref: ${{ inputs.sha }} - - - name: "Set up Python - ${{ env.PYTHON_TARGET_VERSION }}" - uses: actions/setup-python@v4 - with: - python-version: ${{ env.PYTHON_TARGET_VERSION }} - - - name: "Install Python Dependencies" - run: | - python -m pip install --user --upgrade pip - python -m pip install --upgrade setuptools wheel twine check-wheel-contents - python -m pip --version - - - name: "Build Distributions" - run: ./scripts/build-dist.sh - - - name: "[DEBUG] Show Distributions" - run: ls -lh dist/ - - - name: "Check Distribution Descriptions" - run: | - twine check dist/* - - - 
name: "[DEBUG] Check Wheel Contents" - run: | - check-wheel-contents dist/*.whl --ignore W007,W008 + with: + sha: ${{ inputs.sha }} + version_number: ${{ inputs.version_number }} + target_branch: ${{ inputs.target_branch }} + env_setup_script_path: ${{ inputs.env_setup_script_path }} + test_run: ${{ inputs.test_run }} + nightly_release: ${{ inputs.nightly_release }} - - name: "Upload Build Artifact - ${{ inputs.version_number }}" - uses: actions/upload-artifact@v3 - with: - name: ${{ inputs.version_number }} - path: | - dist/ - !dist/dbt-${{ inputs.version_number }}.tar.gz - retention-days: ${{ env.ARTIFACT_RETENTION_DAYS }} + secrets: inherit - test-build: - name: Verify Packages + log-outputs-bump-version-generate-changelog: + name: "[Log output] Bump package version, Generate changelog" + if: ${{ !failure() && !cancelled() }} - needs: [unit, build] + needs: [bump-version-generate-changelog] runs-on: ubuntu-latest steps: - - name: "Set up Python - ${{ env.PYTHON_TARGET_VERSION }}" - uses: actions/setup-python@v4 - with: - python-version: ${{ env.PYTHON_TARGET_VERSION }} - - - name: "Install Python Dependencies" + - name: Print variables run: | - python -m pip install --user --upgrade pip - python -m pip install --upgrade wheel - python -m pip --version - - - name: "Download Build Artifact - ${{ inputs.version_number }}" - uses: actions/download-artifact@v3 - with: - name: ${{ inputs.version_number }} - path: dist/ + echo Final SHA : ${{ needs.bump-version-generate-changelog.outputs.final_sha }} + echo Changelog path: ${{ needs.bump-version-generate-changelog.outputs.changelog_path }} - - name: "[DEBUG] Show Distributions" - run: ls -lh dist/ - - - name: "Install Wheel Distributions" - run: | - find ./dist/*.whl -maxdepth 1 -type f | xargs python -m pip install --force-reinstall --find-links=dist/ + build-test-package: + name: Build, Test, Package + if: ${{ !failure() && !cancelled() }} + needs: [bump-version-generate-changelog] - - name: "[DEBUG] Check Wheel Distributions" - run: | - dbt --version + uses: dbt-labs/dbt-release/.github/workflows/build.yml@main - - name: "Install Source Distributions" - run: | - find ./dist/*.gz -maxdepth 1 -type f | xargs python -m pip install --force-reinstall --find-links=dist/ + with: + sha: ${{ needs.bump-version-generate-changelog.outputs.final_sha }} + version_number: ${{ inputs.version_number }} + changelog_path: ${{ needs.bump-version-generate-changelog.outputs.changelog_path }} + build_script_path: ${{ inputs.build_script_path }} + s3_bucket_name: ${{ inputs.s3_bucket_name }} + package_test_command: ${{ inputs.package_test_command }} + test_run: ${{ inputs.test_run }} + nightly_release: ${{ inputs.nightly_release }} - - name: "[DEBUG] Check Source Distributions" - run: | - dbt --version + secrets: + AWS_ACCESS_KEY_ID: ${{ secrets.PRODUCTION_AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.PRODUCTION_AWS_SECRET_ACCESS_KEY }} github-release: name: GitHub Release if: ${{ !failure() && !cancelled() }} - needs: test-build - # pin to commit since this is workflow is WIP but this commit has been tested as working - uses: dbt-labs/dbt-release/.github/workflows/github-release.yml@7b6e01d73d2c8454e06302cc66ef4c2dbd4dbe4e + needs: [bump-version-generate-changelog, build-test-package] + + uses: dbt-labs/dbt-release/.github/workflows/github-release.yml@main with: - sha: ${{ inputs.sha }} + sha: ${{ needs.bump-version-generate-changelog.outputs.final_sha }} version_number: ${{ inputs.version_number }} - changelog_path: ${{ inputs.changelog_path }} + 
changelog_path: ${{ needs.bump-version-generate-changelog.outputs.changelog_path }} test_run: ${{ inputs.test_run }} pypi-release: - name: Pypi release - # only release to PyPi if we're not testing - will release to PyPi test when workflow gets rewritten - if: ${{ inputs.test_run == false }} + name: PyPI Release - runs-on: ubuntu-latest + needs: [github-release] - needs: github-release + uses: dbt-labs/dbt-release/.github/workflows/pypi-release.yml@main - environment: PypiProd - steps: - - name: "Download Build Artifact - ${{ inputs.version_number }}" - uses: actions/download-artifact@v3 - with: - name: ${{ inputs.version_number }} - path: dist/ - - - name: Publish distribution to PyPI - uses: pypa/gh-action-pypi-publish@v1.4.2 - with: - password: ${{ secrets.PYPI_API_TOKEN }} + with: + version_number: ${{ inputs.version_number }} + test_run: ${{ inputs.test_run }} + + secrets: + PYPI_API_TOKEN: ${{ secrets.PYPI_API_TOKEN }} + TEST_PYPI_API_TOKEN: ${{ secrets.TEST_PYPI_API_TOKEN }} + + slack-notification: + name: Slack Notification + if: ${{ failure() }} + + needs: + [ + bump-version-generate-changelog, + build-test-package, + github-release, + pypi-release, + ] + + uses: dbt-labs/dbt-release/.github/workflows/slack-post-notification.yml@main + with: + status: "failure" + + secrets: + SLACK_WEBHOOK_URL: ${{ secrets.SLACK_DEV_CORE_ALERTS }} diff --git a/scripts/env-setup.sh b/scripts/env-setup.sh new file mode 100644 index 000000000..726a00cf8 --- /dev/null +++ b/scripts/env-setup.sh @@ -0,0 +1,13 @@ +#!/bin/bash +# Set TOXENV environment variable for subsequent steps +echo "TOXENV=integration-bigquery" >> $GITHUB_ENV +# Set INTEGRATION_TESTS_SECRETS_PREFIX environment variable for subsequent steps +# All GH secrets that have this prefix will be set as environment variables +echo "INTEGRATION_TESTS_SECRETS_PREFIX=BIGQUERY_TEST" >> $GITHUB_ENV +# Set environment variables required for integration tests +echo "DBT_TEST_USER_1=group:buildbot@dbtlabs.com" >> $GITHUB_ENV +echo "DBT_TEST_USER_2=group:dev-core@dbtlabs.com" >> $GITHUB_ENV +echo "DBT_TEST_USER_3=serviceAccount:dbt-integration-test-user@dbt-test-env.iam.gserviceaccount.com" >> $GITHUB_ENV +echo "DATAPROC_REGION=us-central1" >> $GITHUB_ENV +echo "DATAPROC_CLUSTER_NAME=dbt-test-1" >> $GITHUB_ENV +echo "GCS_BUCKET=dbt-ci" >> $GITHUB_ENV From 3f8bf458250236c07d7019025018b39a582ca4d0 Mon Sep 17 00:00:00 2001 From: Neelesh Salian Date: Fri, 27 Jan 2023 10:21:05 -0800 Subject: [PATCH 12/16] Deleted redundant tests (#484) --- .../bigquery_test/copy-models/additional.sql | 1 - .../copy-models/copy_as_incremental.sql | 2 - .../copy-models/copy_as_several_tables.sql | 1 - .../copy-models/copy_as_table.sql | 1 - .../bigquery_test/copy-models/original.sql | 1 - .../bigquery_test/copy-models/schema.yml | 6 -- .../execution-project-models/model.sql | 1 - .../execution-project-models/schema.yml | 10 --- .../test_bigquery_adapter_functions.py | 84 ------------------- .../test_bigquery_copy_models.py | 41 --------- .../test_bigquery_execution_project.py | 23 ----- .../test_bigquery_query_results.py | 24 ------ .../test_bigquery_repeated_records.py | 68 --------------- 13 files changed, 263 deletions(-) delete mode 100644 tests/integration/bigquery_test/copy-models/additional.sql delete mode 100644 tests/integration/bigquery_test/copy-models/copy_as_incremental.sql delete mode 100644 tests/integration/bigquery_test/copy-models/copy_as_several_tables.sql delete mode 100644 tests/integration/bigquery_test/copy-models/copy_as_table.sql delete mode 100644 
tests/integration/bigquery_test/copy-models/original.sql delete mode 100644 tests/integration/bigquery_test/copy-models/schema.yml delete mode 100644 tests/integration/bigquery_test/execution-project-models/model.sql delete mode 100644 tests/integration/bigquery_test/execution-project-models/schema.yml delete mode 100644 tests/integration/bigquery_test/test_bigquery_adapter_functions.py delete mode 100644 tests/integration/bigquery_test/test_bigquery_copy_models.py delete mode 100644 tests/integration/bigquery_test/test_bigquery_execution_project.py delete mode 100644 tests/integration/bigquery_test/test_bigquery_query_results.py delete mode 100644 tests/integration/bigquery_test/test_bigquery_repeated_records.py diff --git a/tests/integration/bigquery_test/copy-models/additional.sql b/tests/integration/bigquery_test/copy-models/additional.sql deleted file mode 100644 index 33560d6c0..000000000 --- a/tests/integration/bigquery_test/copy-models/additional.sql +++ /dev/null @@ -1 +0,0 @@ -select 2 as id diff --git a/tests/integration/bigquery_test/copy-models/copy_as_incremental.sql b/tests/integration/bigquery_test/copy-models/copy_as_incremental.sql deleted file mode 100644 index bbe8e5acd..000000000 --- a/tests/integration/bigquery_test/copy-models/copy_as_incremental.sql +++ /dev/null @@ -1,2 +0,0 @@ -{{ config(copy_materialization='incremental') }} -{{ ref('original') }} \ No newline at end of file diff --git a/tests/integration/bigquery_test/copy-models/copy_as_several_tables.sql b/tests/integration/bigquery_test/copy-models/copy_as_several_tables.sql deleted file mode 100644 index 99b04e1b3..000000000 --- a/tests/integration/bigquery_test/copy-models/copy_as_several_tables.sql +++ /dev/null @@ -1 +0,0 @@ -select * from {{ ref('original') }}, {{ source('test_copy_several_tables', 'additional') }} diff --git a/tests/integration/bigquery_test/copy-models/copy_as_table.sql b/tests/integration/bigquery_test/copy-models/copy_as_table.sql deleted file mode 100644 index 3f2fe2550..000000000 --- a/tests/integration/bigquery_test/copy-models/copy_as_table.sql +++ /dev/null @@ -1 +0,0 @@ -{{ ref('original') }} diff --git a/tests/integration/bigquery_test/copy-models/original.sql b/tests/integration/bigquery_test/copy-models/original.sql deleted file mode 100644 index 26d9cae7b..000000000 --- a/tests/integration/bigquery_test/copy-models/original.sql +++ /dev/null @@ -1 +0,0 @@ -select 1 as id \ No newline at end of file diff --git a/tests/integration/bigquery_test/copy-models/schema.yml b/tests/integration/bigquery_test/copy-models/schema.yml deleted file mode 100644 index defbd47b5..000000000 --- a/tests/integration/bigquery_test/copy-models/schema.yml +++ /dev/null @@ -1,6 +0,0 @@ -version: 2 -sources: - - name: test_copy_several_tables - schema: "{{ target.schema }}" - tables: - - name: additional diff --git a/tests/integration/bigquery_test/execution-project-models/model.sql b/tests/integration/bigquery_test/execution-project-models/model.sql deleted file mode 100644 index 43258a714..000000000 --- a/tests/integration/bigquery_test/execution-project-models/model.sql +++ /dev/null @@ -1 +0,0 @@ -select 1 as id diff --git a/tests/integration/bigquery_test/execution-project-models/schema.yml b/tests/integration/bigquery_test/execution-project-models/schema.yml deleted file mode 100644 index 390165a86..000000000 --- a/tests/integration/bigquery_test/execution-project-models/schema.yml +++ /dev/null @@ -1,10 +0,0 @@ -version: 2 -models: -- name: model - description: | - I'm testing the profile 
execution_project - tests: - - project_for_job_id: - region: region-us - project_id: "{{ project_id}}" - unique_schema_id: "{{ unique_schema_id }}" \ No newline at end of file diff --git a/tests/integration/bigquery_test/test_bigquery_adapter_functions.py b/tests/integration/bigquery_test/test_bigquery_adapter_functions.py deleted file mode 100644 index 427470f1f..000000000 --- a/tests/integration/bigquery_test/test_bigquery_adapter_functions.py +++ /dev/null @@ -1,84 +0,0 @@ -from tests.integration.base import DBTIntegrationTest, FakeArgs, use_profile -import yaml - - -class TestBigqueryAdapterFunctions(DBTIntegrationTest): - - @property - def schema(self): - return "bigquery_test" - - @property - def models(self): - return "adapter-models" - - @property - def profile_config(self): - return self.bigquery_profile() - - @use_profile('bigquery') - def test__bigquery_adapter_functions(self): - results = self.run_dbt() - self.assertEqual(len(results), 3) - - for result in results: - # all queries in adapter models are jobs that are expected to have a location/project_id/job_id - assert result.adapter_response["location"] is not None - assert result.adapter_response["project_id"] is not None - assert result.adapter_response["job_id"] is not None - assert result.adapter_response["slot_ms"] is not None - - test_results = self.run_dbt(['test']) - - self.assertTrue(len(test_results) > 0) - for result in test_results: - self.assertEqual(result.status, 'pass') - self.assertFalse(result.skipped) - self.assertEqual(result.failures, 0) - - -class TestBigqueryAdapterMacros(DBTIntegrationTest): - @property - def schema(self): - return "bigquery_test" - - @property - def models(self): - return "models" - - def _create_schema_named(self, database, schema): - # do not create the initial schema. We'll do this ourselves! 
- pass - - @use_profile('bigquery') - def test__bigquery_run_create_drop_schema(self): - schema_args = yaml.safe_dump({ - 'db_name': self.default_database, - 'schema_name': self.unique_schema(), - }) - self.run_dbt( - ['run-operation', 'my_create_schema', '--args', schema_args]) - relation_args = yaml.safe_dump({ - 'db_name': self.default_database, - 'schema_name': self.unique_schema(), - 'table_name': 'some_table', - }) - self.run_dbt(['run-operation', 'my_create_table_as', - '--args', relation_args]) - # exercise list_relations_without_caching and get_columns_in_relation - self.run_dbt( - ['run-operation', 'ensure_one_relation_in', '--args', schema_args]) - # now to drop the schema - schema_relation = self.adapter.Relation.create( - database=self.default_database, schema=self.unique_schema()).without_identifier() - with self.adapter.connection_named('test'): - results = self.adapter.list_relations_without_caching( - schema_relation) - assert len(results) == 1 - - self.run_dbt( - ['run-operation', 'my_drop_schema', '--args', schema_args]) - with self.adapter.connection_named('test'): - results = self.adapter.list_relations_without_caching( - schema_relation) - assert len(results) == 0 diff --git a/tests/integration/bigquery_test/test_bigquery_copy_models.py b/tests/integration/bigquery_test/test_bigquery_copy_models.py deleted file mode 100644 index f4362659b..000000000 --- a/tests/integration/bigquery_test/test_bigquery_copy_models.py +++ /dev/null @@ -1,41 +0,0 @@ -from tests.integration.base import DBTIntegrationTest, use_profile -import textwrap -import yaml - - -class TestBigqueryCopyTable(DBTIntegrationTest): - - @property - def schema(self): - return "bigquery_test" - - @property - def models(self): - return "copy-models" - - @property - def profile_config(self): - return self.bigquery_profile() - - @property - def project_config(self): - return yaml.safe_load(textwrap.dedent('''\ - config-version: 2 - models: - test: - original: - materialized: table - additional: - materialized: table - copy_as_table: - materialized: copy - copy_as_several_tables: - materialized: copy - copy_as_incremental: - materialized: copy - ''')) - - @use_profile('bigquery') - def test__bigquery_copy_table(self): - results = self.run_dbt() - self.assertEqual(len(results), 5) diff --git a/tests/integration/bigquery_test/test_bigquery_execution_project.py b/tests/integration/bigquery_test/test_bigquery_execution_project.py deleted file mode 100644 index 50c4c92f5..000000000 --- a/tests/integration/bigquery_test/test_bigquery_execution_project.py +++ /dev/null @@ -1,23 +0,0 @@ -import os -from tests.integration.base import DBTIntegrationTest, use_profile - - -class TestAlternateExecutionProjectBigQueryRun(DBTIntegrationTest): - @property - def schema(self): - return "bigquery_test" - - @property - def models(self): - return "execution-project-models" - - @use_profile('bigquery') - def test__bigquery_execute_project(self): - results = self.run_dbt(['run', '--models', 'model']) - self.assertEqual(len(results), 1) - execution_project = os.environ['BIGQUERY_TEST_ALT_DATABASE'] - self.run_dbt(['test', - '--target', 'alternate', - '--vars', '{ project_id: %s, unique_schema_id: %s }' - % (execution_project, self.unique_schema())], - expect_pass=False) diff --git a/tests/integration/bigquery_test/test_bigquery_query_results.py b/tests/integration/bigquery_test/test_bigquery_query_results.py deleted file mode 100644 index 6cd32f383..000000000 --- a/tests/integration/bigquery_test/test_bigquery_query_results.py +++ 
/dev/null @@ -1,24 +0,0 @@ -from tests.integration.base import DBTIntegrationTest, use_profile - - -class TestBaseBigQueryResults(DBTIntegrationTest): - - @property - def schema(self): - return "bigquery_test" - - @property - def models(self): - return "models" - - @property - def project_config(self): - return { - 'config-version': 2, - 'macro-paths': ['macros'], - } - - @use_profile('bigquery') - def test__bigquery_type_inference(self): - result = self.run_dbt(['run-operation', 'test_int_inference']) - self.assertTrue(result.success) diff --git a/tests/integration/bigquery_test/test_bigquery_repeated_records.py b/tests/integration/bigquery_test/test_bigquery_repeated_records.py deleted file mode 100644 index 16ee69cda..000000000 --- a/tests/integration/bigquery_test/test_bigquery_repeated_records.py +++ /dev/null @@ -1,68 +0,0 @@ -from tests.integration.base import DBTIntegrationTest, use_profile -import json - -class TestBaseBigQueryRun(DBTIntegrationTest): - - @property - def schema(self): - return "bigquery_test" - - @property - def models(self): - return "models" - - @property - def project_config(self): - return { - 'config-version': 2, - 'macro-paths': ['macros'], - } - - @use_profile('bigquery') - def test__bigquery_fetch_nested_records(self): - sql = """ - select - struct( - cast('Michael' as string) as fname, - cast('Stonebreaker' as string) as lname - ) as user, - [ - struct(1 as val_1, cast(2.12 as numeric) as val_2), - struct(3 as val_1, cast(4.83 as numeric) as val_2) - ] as val - - union all - - select - struct( - cast('Johnny' as string) as fname, - cast('Brickmaker' as string) as lname - ) as user, - [ - struct(7 as val_1, cast(8 as numeric) as val_2), - struct(9 as val_1, cast(null as numeric) as val_2) - ] as val - """ - - - status, res = self.adapter.execute(sql, fetch=True) - - self.assertEqual(len(res), 2, "incorrect row count") - - expected = { - "user": [ - '{"fname": "Michael", "lname": "Stonebreaker"}', - '{"fname": "Johnny", "lname": "Brickmaker"}' - ], - "val": [ - '[{"val_1": 1, "val_2": 2.12}, {"val_1": 3, "val_2": 4.83}]', - '[{"val_1": 7, "val_2": 8}, {"val_1": 9, "val_2": null}]' - ] - } - - for i, key in enumerate(expected): - line = "row {} for key {} ({} vs {})".format(i, key, expected[key][i], res[i][key]) - # py2 serializes these in an unordered way - deserialize to compare - v1 = expected[key][i] - v2 = res[i][key] - self.assertEqual(json.loads(v1), json.loads(v2), line) From 6f945febb0076ef74ccb1df69f65e8e9cbc5d291 Mon Sep 17 00:00:00 2001 From: Alexander Smolyakov Date: Mon, 30 Jan 2023 19:19:06 +0400 Subject: [PATCH 13/16] Update release workflow (#497) - Update AWS secrets - Rework condition for Slack notification --- .github/workflows/release.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index a9f60734b..1c0885001 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -176,8 +176,8 @@ jobs: nightly_release: ${{ inputs.nightly_release }} secrets: - AWS_ACCESS_KEY_ID: ${{ secrets.PRODUCTION_AWS_ACCESS_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.PRODUCTION_AWS_SECRET_ACCESS_KEY }} + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} github-release: name: GitHub Release @@ -210,7 +210,7 @@ jobs: slack-notification: name: Slack Notification - if: ${{ failure() }} + if: ${{ failure() && (!inputs.test_run || inputs.nightly_release) }} needs: [ From 
446d30b63a160b66ac581673bf858beca85c7d69 Mon Sep 17 00:00:00 2001 From: colin-rogers-dbt <111200756+colin-rogers-dbt@users.noreply.github.com> Date: Mon, 30 Jan 2023 11:19:26 -0800 Subject: [PATCH 14/16] remove case sensitive and copy fail integ tests (#494) --- .../case-sensitive-models/fUnKyCaSe.sql | 5 --- .../case-sensitive-schemas/model.sql | 5 --- .../copy_bad_materialization.sql | 2 -- .../copy-failing-models/original.sql | 1 - .../test_bigquery_case_sensitive.py | 34 ------------------ .../test_bigquery_copy_failing_models.py | 36 ------------------- 6 files changed, 83 deletions(-) delete mode 100644 tests/integration/bigquery_test/case-sensitive-models/fUnKyCaSe.sql delete mode 100644 tests/integration/bigquery_test/case-sensitive-schemas/model.sql delete mode 100644 tests/integration/bigquery_test/copy-failing-models/copy_bad_materialization.sql delete mode 100644 tests/integration/bigquery_test/copy-failing-models/original.sql delete mode 100644 tests/integration/bigquery_test/test_bigquery_case_sensitive.py delete mode 100644 tests/integration/bigquery_test/test_bigquery_copy_failing_models.py diff --git a/tests/integration/bigquery_test/case-sensitive-models/fUnKyCaSe.sql b/tests/integration/bigquery_test/case-sensitive-models/fUnKyCaSe.sql deleted file mode 100644 index 1934d4cfc..000000000 --- a/tests/integration/bigquery_test/case-sensitive-models/fUnKyCaSe.sql +++ /dev/null @@ -1,5 +0,0 @@ -{{ config(materialized='incremental') }} -select 1 as id -{% if is_incremental() %} -this is a syntax error! -{% endif %} diff --git a/tests/integration/bigquery_test/case-sensitive-schemas/model.sql b/tests/integration/bigquery_test/case-sensitive-schemas/model.sql deleted file mode 100644 index 1934d4cfc..000000000 --- a/tests/integration/bigquery_test/case-sensitive-schemas/model.sql +++ /dev/null @@ -1,5 +0,0 @@ -{{ config(materialized='incremental') }} -select 1 as id -{% if is_incremental() %} -this is a syntax error! 
-{% endif %} diff --git a/tests/integration/bigquery_test/copy-failing-models/copy_bad_materialization.sql b/tests/integration/bigquery_test/copy-failing-models/copy_bad_materialization.sql deleted file mode 100644 index b6093645d..000000000 --- a/tests/integration/bigquery_test/copy-failing-models/copy_bad_materialization.sql +++ /dev/null @@ -1,2 +0,0 @@ -{{ config(copy_materialization='view') }} -{{ ref('original') }} \ No newline at end of file diff --git a/tests/integration/bigquery_test/copy-failing-models/original.sql b/tests/integration/bigquery_test/copy-failing-models/original.sql deleted file mode 100644 index 26d9cae7b..000000000 --- a/tests/integration/bigquery_test/copy-failing-models/original.sql +++ /dev/null @@ -1 +0,0 @@ -select 1 as id \ No newline at end of file diff --git a/tests/integration/bigquery_test/test_bigquery_case_sensitive.py b/tests/integration/bigquery_test/test_bigquery_case_sensitive.py deleted file mode 100644 index 60700eada..000000000 --- a/tests/integration/bigquery_test/test_bigquery_case_sensitive.py +++ /dev/null @@ -1,34 +0,0 @@ -from tests.integration.base import DBTIntegrationTest, use_profile - - -class TestCaseSensitiveModelBigQueryRun(DBTIntegrationTest): - @property - def schema(self): - return "bigquery_test" - - @property - def models(self): - return "case-sensitive-models" - - @use_profile('bigquery') - def test__bigquery_double_run_fails(self): - results = self.run_dbt() - self.assertEqual(len(results), 1) - self.run_dbt(expect_pass=False) - - -class TestCaseSensitiveSchemaBigQueryRun(TestCaseSensitiveModelBigQueryRun): - # same test, but the schema is funky instead of the model name - @property - def schema(self): - return "bigquery_test" - - def unique_schema(self): - schema = self.schema - - to_return = "{}_{}".format(self.prefix, schema) - return to_return - - @property - def models(self): - return "case-sensitive-schemas" diff --git a/tests/integration/bigquery_test/test_bigquery_copy_failing_models.py b/tests/integration/bigquery_test/test_bigquery_copy_failing_models.py deleted file mode 100644 index 659525663..000000000 --- a/tests/integration/bigquery_test/test_bigquery_copy_failing_models.py +++ /dev/null @@ -1,36 +0,0 @@ -from tests.integration.base import DBTIntegrationTest, use_profile -import textwrap -import yaml - - -class TestBigqueryCopyTableFails(DBTIntegrationTest): - - @property - def schema(self): - return "bigquery_test" - - @property - def models(self): - return "copy-failing-models" - - @property - def profile_config(self): - return self.bigquery_profile() - - @property - def project_config(self): - return yaml.safe_load(textwrap.dedent('''\ - config-version: 2 - models: - test: - original: - materialized: table - copy_bad_materialization: - materialized: copy - ''')) - - @use_profile('bigquery') - def test__bigquery_copy_table_fails(self): - results = self.run_dbt(expect_pass=False) - self.assertEqual(len(results), 2) - self.assertEqual(results[1].status, 'error') From 73f36024d45a756e6bf88e934909f2c07cb217a7 Mon Sep 17 00:00:00 2001 From: colin-rogers-dbt <111200756+colin-rogers-dbt@users.noreply.github.com> Date: Mon, 30 Jan 2023 11:23:10 -0800 Subject: [PATCH 15/16] convert update columns tests (#488) * convert update columns tests * remove integ test fixtures --- tests/functional/test_update_column_policy.py | 61 +++++++++++++ .../test_update_field_description.py | 58 +++++++++++++ .../test_bigquery_update_columns.py | 85 ------------------- .../description_table.sql | 9 -- 
.../description_table.yml | 7 -- .../policy_tag_table.sql | 9 -- .../policy_tag_table.yml | 8 -- 7 files changed, 119 insertions(+), 118 deletions(-) create mode 100644 tests/functional/test_update_column_policy.py create mode 100644 tests/functional/test_update_field_description.py delete mode 100644 tests/integration/bigquery_test/test_bigquery_update_columns.py delete mode 100644 tests/integration/bigquery_test/update-column-description/description_table.sql delete mode 100644 tests/integration/bigquery_test/update-column-description/description_table.yml delete mode 100644 tests/integration/bigquery_test/update-column-policy-tag/policy_tag_table.sql delete mode 100644 tests/integration/bigquery_test/update-column-policy-tag/policy_tag_table.yml diff --git a/tests/functional/test_update_column_policy.py b/tests/functional/test_update_column_policy.py new file mode 100644 index 000000000..0599c0099 --- /dev/null +++ b/tests/functional/test_update_column_policy.py @@ -0,0 +1,61 @@ +import pytest +from dbt.tests.util import ( + run_dbt, get_connection, relation_from_name +) + +from dbt.adapters.bigquery import BigQueryRelation + +_POLICY_TAG_MODEL = """{{ + config( + materialized='table', + persist_docs={ 'columns': true } + ) +}} + +select + 1 field +""" + +_POLICY_TAG_YML = """version: 2 + +models: +- name: policy_tag_table + columns: + - name: field + policy_tags: + - '{{ var("policy_tag") }}' +""" + +# Manually generated https://console.cloud.google.com/bigquery/policy-tags?project=dbt-test-env +_POLICY_TAG = "projects/dbt-test-env/locations/us/taxonomies/5785568062805976401/policyTags/135489647357012267" +_POLICY_TAG_MODEL_NAME = "policy_tag_table" + +class TestBigqueryUpdateColumnPolicy: + """See BQ docs for more info on policy tags: + https://cloud.google.com/bigquery/docs/column-level-security#work_with_policy_tags""" + @pytest.fixture(scope="class") + def project_config_update(self): + return { + 'config-version': 2, + 'vars': { + 'policy_tag': _POLICY_TAG + } + } + + @pytest.fixture(scope="class") + def models(self): + return { + f"{_POLICY_TAG_MODEL_NAME}.sql": _POLICY_TAG_MODEL, + "schema.yml": _POLICY_TAG_YML + } + + def test_bigquery_update_column_policy_tag(self, project): + results = run_dbt(['run', '--models', 'policy_tag_table']) + assert len(results) == 1 + relation: BigQueryRelation = relation_from_name(project.adapter, _POLICY_TAG_MODEL_NAME) + adapter = project.adapter + with get_connection(project.adapter) as conn: + table = conn.handle.get_table( + adapter.connections.get_bq_table(relation.database, relation.schema, relation.table)) + for schema_field in table.schema: + assert schema_field.policy_tags.names == (_POLICY_TAG,) diff --git a/tests/functional/test_update_field_description.py b/tests/functional/test_update_field_description.py new file mode 100644 index 000000000..68be9c42f --- /dev/null +++ b/tests/functional/test_update_field_description.py @@ -0,0 +1,58 @@ +import pytest +from dbt.tests.util import ( + relation_from_name, + get_connection, + run_dbt +) + +from dbt.adapters.bigquery import BigQueryRelation + +_FIELD_DESCRIPTION_MODEL = """{{ + config( + materialized='table', + persist_docs={ 'columns': true } + ) +}} + +select + 1 field +""" +_FIELD_DESCRIPTION_MODEL_NAME = "field_description_model" +_FIELD_DESCRIPTION = 'this is not a field' +_FIELD_DESCRIPTION_MODEL_YML = """ +version: 2 + +models: +- name: field_description_model + columns: + - name: field + description: '{{ var("field_description") }}' +""" + +class 
TestBigqueryUpdateColumnDescription: + @pytest.fixture(scope="class") + def project_config_update(self): + return { + 'config-version': 2, + 'vars': { + 'field_description': _FIELD_DESCRIPTION + } + } + + @pytest.fixture(scope="class") + def models(self): + return { + f"{_FIELD_DESCRIPTION_MODEL_NAME}.sql": _FIELD_DESCRIPTION_MODEL, + "schema.yml": _FIELD_DESCRIPTION_MODEL_YML + } + + def test_bigquery_update_column_description(self, project): + results = run_dbt(['run']) + assert len(results) == 1 + relation: BigQueryRelation = relation_from_name(project.adapter, _FIELD_DESCRIPTION_MODEL_NAME) + adapter = project.adapter + with get_connection(project.adapter) as conn: + table = conn.handle.get_table( + adapter.connections.get_bq_table(relation.database, relation.schema, relation.table)) + for schema_field in table.schema: + assert schema_field.description == _FIELD_DESCRIPTION diff --git a/tests/integration/bigquery_test/test_bigquery_update_columns.py b/tests/integration/bigquery_test/test_bigquery_update_columns.py deleted file mode 100644 index ed3c93556..000000000 --- a/tests/integration/bigquery_test/test_bigquery_update_columns.py +++ /dev/null @@ -1,85 +0,0 @@ -import os - -from tests.integration.base import DBTIntegrationTest, use_profile - - -class TestBigqueryUpdateColumnPolicyTag(DBTIntegrationTest): - - @property - def schema(self): - return "bigquery_test" - - @property - def models(self): - return "update-column-policy-tag" - - @property - def project_config(self): - return { - 'config-version': 2, - 'vars': { - 'policy_tag': self.policy_tag - } - } - - @property - def policy_tag(self): - return os.environ.get('BIGQUERY_POLICY_TAG') - - @use_profile('bigquery') - def test__bigquery_update_column_policy_tag(self): - if self.policy_tag: - results = self.run_dbt(['run', '--models', 'policy_tag_table']) - self.assertEqual(len(results), 1) - - with self.get_connection() as conn: - client = conn.handle - - table = client.get_table( - self.adapter.connections.get_bq_table( - self.default_database, self.unique_schema(), 'policy_tag_table') - ) - - for schema_field in table.schema: - self.assertEquals(schema_field.policy_tags.names, - (self.policy_tag,)) - - -class TestBigqueryUpdateColumnDescription(DBTIntegrationTest): - - @property - def schema(self): - return "bigquery_test" - - @property - def models(self): - return "update-column-description" - - @property - def project_config(self): - return { - 'config-version': 2, - 'vars': { - 'field_description': self.field_description - } - } - - @property - def field_description(self): - return 'this is a field' - - @use_profile('bigquery') - def test__bigquery_update_column_description(self): - results = self.run_dbt(['run', '--models', 'description_table']) - self.assertEqual(len(results), 1) - - with self.get_connection() as conn: - client = conn.handle - - table = client.get_table( - self.adapter.connections.get_bq_table( - self.default_database, self.unique_schema(), 'description_table') - ) - - for schema_field in table.schema: - self.assertEquals(schema_field.description, self.field_description) diff --git a/tests/integration/bigquery_test/update-column-description/description_table.sql b/tests/integration/bigquery_test/update-column-description/description_table.sql deleted file mode 100644 index 7110ac8b0..000000000 --- a/tests/integration/bigquery_test/update-column-description/description_table.sql +++ /dev/null @@ -1,9 +0,0 @@ -{{ - config( - materialized='table', - persist_docs={ 'columns': true } - ) -}} - -select - 1 field 
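Reviewer note: both converted tests verify the `persist_docs` round-trip by reading the live table schema back through the adapter's connection. The same check can be reproduced outside the test harness with the google-cloud-bigquery client directly — a sketch, where the fully-qualified table id is a placeholder:

```python
from google.cloud import bigquery

client = bigquery.Client()
# placeholder id; in the tests it is derived via relation_from_name(...)
table = client.get_table("my-project.my_dataset.field_description_model")

for field in table.schema:
    # persist_docs={'columns': true} should have written the YAML description
    # (and, in the policy-tag test, attached the policy tag) to each field
    print(field.name, field.description, field.policy_tags)
```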
diff --git a/tests/integration/bigquery_test/update-column-description/description_table.yml b/tests/integration/bigquery_test/update-column-description/description_table.yml deleted file mode 100644 index 28d68f007..000000000 --- a/tests/integration/bigquery_test/update-column-description/description_table.yml +++ /dev/null @@ -1,7 +0,0 @@ -version: 2 - -models: -- name: description_table - columns: - - name: field - description: '{{ var("field_description") }}' diff --git a/tests/integration/bigquery_test/update-column-policy-tag/policy_tag_table.sql b/tests/integration/bigquery_test/update-column-policy-tag/policy_tag_table.sql deleted file mode 100644 index 7110ac8b0..000000000 --- a/tests/integration/bigquery_test/update-column-policy-tag/policy_tag_table.sql +++ /dev/null @@ -1,9 +0,0 @@ -{{ - config( - materialized='table', - persist_docs={ 'columns': true } - ) -}} - -select - 1 field diff --git a/tests/integration/bigquery_test/update-column-policy-tag/policy_tag_table.yml b/tests/integration/bigquery_test/update-column-policy-tag/policy_tag_table.yml deleted file mode 100644 index 01a76c50d..000000000 --- a/tests/integration/bigquery_test/update-column-policy-tag/policy_tag_table.yml +++ /dev/null @@ -1,8 +0,0 @@ -version: 2 - -models: -- name: policy_tag_table - columns: - - name: field - policy_tags: - - '{{ var("policy_tag") }}' From 8339a034929b12e027f0a143abf46582f3f6ffbc Mon Sep 17 00:00:00 2001 From: Doug Beatty <44704949+dbeatty10@users.noreply.github.com> Date: Mon, 30 Jan 2023 15:30:23 -0700 Subject: [PATCH 16/16] Use IEC standard abbreviations (GiB, TiB, etc) (#482) * Use IEC standard abbreviations (GiB, TiB, etc) * Changelog entry --------- Co-authored-by: Florian Eiden --- .changes/unreleased/Fixes-20230125-174159.yaml | 7 +++++++ dbt/adapters/bigquery/connections.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) create mode 100644 .changes/unreleased/Fixes-20230125-174159.yaml diff --git a/.changes/unreleased/Fixes-20230125-174159.yaml b/.changes/unreleased/Fixes-20230125-174159.yaml new file mode 100644 index 000000000..4100080d4 --- /dev/null +++ b/.changes/unreleased/Fixes-20230125-174159.yaml @@ -0,0 +1,7 @@ +kind: Fixes +body: Use IEC standard abbreviations (GiB, TiB, etc) +time: 2023-01-25T17:41:59.921173-07:00 +custom: + Author: dbeatty10 + Issue: "477" + PR: "477" diff --git a/dbt/adapters/bigquery/connections.py b/dbt/adapters/bigquery/connections.py index 715dfb36a..7bcc63c4b 100644 --- a/dbt/adapters/bigquery/connections.py +++ b/dbt/adapters/bigquery/connections.py @@ -257,7 +257,7 @@ def commit(self): def format_bytes(self, num_bytes): if num_bytes: - for unit in ["Bytes", "KB", "MB", "GB", "TB", "PB"]: + for unit in ["Bytes", "KiB", "MiB", "GiB", "TiB", "PiB"]: if abs(num_bytes) < 1024.0: return f"{num_bytes:3.1f} {unit}" num_bytes /= 1024.0
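Reviewer note: with the unit-name change above, the labels finally match the arithmetic — the loop divides by 1024 per step, which is binary (IEC) scaling, so "GiB"/"TiB" are the accurate names. A standalone sketch of the loop shown in the hunk (the fallback return for values beyond PiB is an assumption, since the hunk only shows the unit-list line):

```python
def format_bytes(num_bytes: float) -> str:
    for unit in ["Bytes", "KiB", "MiB", "GiB", "TiB", "PiB"]:
        if abs(num_bytes) < 1024.0:
            return f"{num_bytes:3.1f} {unit}"
        num_bytes /= 1024.0  # base-1024 steps, hence the IEC names
    return f"{num_bytes * 1024.0:3.1f} PiB"  # values past the table stay in PiB


print(format_bytes(17_179_869_184))  # "16.0 GiB" (previously labeled "16.0 GB")
```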