Enable group limit tests for Databricks 13.3.
mythrocks committed Mar 5, 2024
1 parent ec4fbcf commit b6f6204
Showing 2 changed files with 11 additions and 6 deletions.
3 changes: 3 additions & 0 deletions integration_tests/src/main/python/spark_session.py
@@ -260,6 +260,9 @@ def is_databricks113_or_later():
 def is_databricks122_or_later():
     return is_databricks_version_or_later(12, 2)
 
+def is_databricks133_or_later():
+    return is_databricks_version_or_later(13, 3)
+
 def supports_delta_lake_deletion_vectors():
     if is_databricks_runtime():
         return is_databricks122_or_later()
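For context, a minimal sketch (an assumption for illustration, not code from this diff) of how the new helper presumably composes with the existing version predicates in spark_session.py. The environment-variable detection below is hypothetical; the real project may read the Databricks runtime version differently.

import os

def is_databricks_runtime():
    # Hypothetical: assume Databricks clusters expose their runtime
    # version via an environment variable.
    return 'DATABRICKS_RUNTIME_VERSION' in os.environ

def is_databricks_version_or_later(major, minor):
    # Compare (major, minor) tuples, so e.g. 13.3 satisfies a (12, 2)
    # check but not a (14, 0) one; returns False off-Databricks.
    version = os.environ.get('DATABRICKS_RUNTIME_VERSION')
    if version is None:
        return False
    parts = version.split('.')
    return (int(parts[0]), int(parts[1])) >= (major, minor)

def is_databricks133_or_later():
    return is_databricks_version_or_later(13, 3)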
14 changes: 8 additions & 6 deletions integration_tests/src/main/python/window_function_test.py
@@ -21,7 +21,7 @@
 from pyspark.sql.types import DateType, TimestampType, NumericType
 from pyspark.sql.window import Window
 import pyspark.sql.functions as f
-from spark_session import is_before_spark_320, is_before_spark_350, is_databricks113_or_later, spark_version, with_cpu_session
+from spark_session import is_spark_350_or_later, is_before_spark_320, is_databricks113_or_later, is_databricks133_or_later, spark_version, with_cpu_session
 import warnings
 
 _grpkey_longs_with_no_nulls = [
@@ -2042,8 +2042,9 @@ def assert_query_runs_on(exec, conf):
     assert_query_runs_on(exec='GpuBatchedBoundedWindowExec', conf=conf_200)
 
 
-@pytest.mark.skipif(condition=is_before_spark_350(),
-                    reason="WindowGroupLimit not available for spark.version < 3.5")
+@pytest.mark.skipif(condition=not (is_spark_350_or_later() or is_databricks133_or_later()),
+                    reason="WindowGroupLimit not available for spark.version < 3.5 "
+                           "and Databricks version < 13.3")
 @ignore_order(local=True)
 @approximate_float
 @pytest.mark.parametrize('batch_size', ['1k', '1g'], ids=idfn)
@@ -2087,12 +2088,13 @@ def test_window_group_limits_for_ranking_functions(data_gen, batch_size, rank_cl
         lambda spark: gen_df(spark, data_gen, length=4096),
         "window_agg_table",
         query,
-        conf = conf)
+        conf=conf)
 
 
 @allow_non_gpu('WindowGroupLimitExec')
-@pytest.mark.skipif(condition=is_before_spark_350(),
-                    reason="WindowGroupLimit not available for spark.version < 3.5")
+@pytest.mark.skipif(condition=not (is_spark_350_or_later() or is_databricks133_or_later()),
+                    reason="WindowGroupLimit not available for spark.version < 3.5 "
+                           " and Databricks version < 13.3")
 @ignore_order(local=True)
 @approximate_float
 def test_window_group_limits_fallback_for_row_number():
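The reworked skip condition is needed because Databricks 13.3 is built on Spark 3.4 and therefore reports a Spark version below 3.5.0, yet it ships the WindowGroupLimit optimization; testing the Spark version alone would incorrectly skip these tests there. A minimal truth-table sketch of the combined predicate (the should_skip helper is hypothetical, mirroring the decorator above):

def should_skip(spark_350_or_later, databricks_133_or_later):
    # Mirrors: not (is_spark_350_or_later() or is_databricks133_or_later())
    return not (spark_350_or_later or databricks_133_or_later)

assert should_skip(False, False)        # e.g. Apache Spark 3.4.x: skip
assert not should_skip(True, False)     # e.g. Apache Spark 3.5.0+: run
assert not should_skip(False, True)     # e.g. Databricks 13.3: run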
