fix: don't reformat generated queries (#30350)
(cherry picked from commit 0b34197)
betodealmeida authored and sadpandajoe committed Oct 4, 2024
1 parent 7519cab commit 4b9ae07
Showing 11 changed files with 29 additions and 50 deletions.
@@ -25,7 +25,7 @@ describe('AdhocMetrics', () => {
   });
 
   it('Clear metric and set simple adhoc metric', () => {
-    const metric = 'SUM(num_girls)';
+    const metric = 'sum(num_girls)';
     const metricName = 'Sum Girls';
     cy.get('[data-test=metrics]')
       .find('[data-test="remove-control-button"]')
@@ -100,7 +100,7 @@ describe('Visualization > Table', () => {
     });
     cy.verifySliceSuccess({
       waitAlias: '@chartData',
-      querySubstring: /group by\n.*name/i,
+      querySubstring: /GROUP BY.*name/i,
       chartSelector: 'table',
     });
   });
@@ -246,7 +246,7 @@ describe('Visualization > Table', () => {
     cy.visitChartByParams(formData);
     cy.verifySliceSuccess({
       waitAlias: '@chartData',
-      querySubstring: /group by\n.*state/i,
+      querySubstring: /GROUP BY.*state/i,
       chartSelector: 'table',
     });
     cy.get('td').contains(/\d*%/);
7 changes: 1 addition & 6 deletions superset/models/helpers.py
@@ -63,12 +63,11 @@
     ColumnNotFoundException,
     QueryClauseValidationException,
     QueryObjectValidationError,
-    SupersetParseError,
     SupersetSecurityException,
 )
 from superset.extensions import feature_flag_manager
 from superset.jinja_context import BaseTemplateProcessor
-from superset.sql.parse import SQLScript, SQLStatement
+from superset.sql.parse import SQLScript
 from superset.sql_parse import (
     has_table_query,
     insert_rls_in_predicate,
@@ -870,10 +869,6 @@ def get_query_str_extended(
         sqlaq = self.get_sqla_query(**query_obj)
         sql = self.database.compile_sqla_query(sqlaq.sqla_query)
         sql = self._apply_cte(sql, sqlaq.cte)
-        try:
-            sql = SQLStatement(sql, engine=self.db_engine_spec.engine).format()
-        except SupersetParseError:
-            logger.warning("Unable to parse SQL to format it, passing it as-is")
 
         if mutate:
             sql = self.database.mutate_sql_based_on_config(sql)
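For context on why every test expectation in this commit changes: the deleted block above passed the compiled query through SQLStatement.format(), which superset.sql.parse implements on top of sqlglot's pretty printer, so keywords were normalized and clauses were split onto their own lines. The sketch below only illustrates that effect by calling sqlglot directly; it is not the exact Superset code path, and the sample query and dialect are made up for illustration.

import sqlglot  # assumption: sqlglot is what backs SQLStatement.format()

# A query roughly as SQLAlchemy might compile it (sample query, not from Superset).
raw = "select name, sum(num) as total from birth_names where num is not null group by name"

# Pretty-printing normalizes keyword case and splits clauses across lines,
# which is approximately the transformation the removed code applied.
pretty = sqlglot.transpile(raw, read="sqlite", pretty=True)[0]
print(pretty)
# Expected shape (approximate):
# SELECT
#   name,
#   SUM(num) AS total
# FROM birth_names
# WHERE
#   NOT num IS NULL
# GROUP BY
#   name

With the formatting step gone, the generated SQL is returned as compiled, which is why the updated assertions below match the raw single-line output (for example `num IS NOT NULL` instead of `NOT num IS NULL`, and `GROUP BY name` on one line).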
8 changes: 4 additions & 4 deletions tests/integration_tests/charts/data/api_tests.py
@@ -716,7 +716,7 @@ def test_when_where_parameter_is_template_and_query_result_type__query_is_templa
         rv = self.post_assert_metric(CHART_DATA_URI, self.query_context_payload, "data")
         result = rv.json["result"][0]["query"]
         if get_example_database().backend != "presto":
-            assert "(\n 'boy' = 'boy'\n )" in result
+            assert "('boy' = 'boy')" in result
 
     @with_feature_flags(GLOBAL_ASYNC_QUERIES=True)
     @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
@@ -1346,13 +1346,13 @@ def test_time_filter_with_grain(test_client, login_as_admin, physical_query_cont
     backend = get_example_database().backend
     if backend == "sqlite":
         assert (
-            "DATETIME(col5, 'start of day', -STRFTIME('%w', col5) || ' days') >="
+            "DATETIME(col5, 'start of day', -strftime('%w', col5) || ' days') >="
             in query
         )
     elif backend == "mysql":
-        assert "DATE(DATE_SUB(col5, INTERVAL (DAYOFWEEK(col5) - 1) DAY)) >=" in query
+        assert "DATE(DATE_SUB(col5, INTERVAL DAYOFWEEK(col5) - 1 DAY)) >=" in query
     elif backend == "postgresql":
-        assert "DATE_TRUNC('WEEK', col5) >=" in query
+        assert "DATE_TRUNC('week', col5) >=" in query
     elif backend == "presto":
         assert "date_trunc('week', CAST(col5 AS TIMESTAMP)) >=" in query
 
2 changes: 1 addition & 1 deletion tests/integration_tests/datasource_tests.py
@@ -683,7 +683,7 @@ def test_get_samples_with_multiple_filters(
assert "2000-01-02" in rv.json["result"]["query"]
assert "2000-01-04" in rv.json["result"]["query"]
assert "col3 = 1.2" in rv.json["result"]["query"]
assert "col4 IS NULL" in rv.json["result"]["query"]
assert "col4 is null" in rv.json["result"]["query"]
assert "col2 = 'c'" in rv.json["result"]["query"]


@@ -308,7 +308,7 @@ def test_calculated_column_in_order_by_base_engine_spec(self):
         }
         sql = table.get_query_str(query_obj)
         assert (
-            "ORDER BY\n CASE WHEN gender = 'boy' THEN 'male' ELSE 'female' END ASC"
+            "ORDER BY \n case\n when gender='boy' then 'male'\n else 'female'\n end\n ASC"
             in sql
         )
 
2 changes: 1 addition & 1 deletion tests/integration_tests/db_engine_specs/bigquery_tests.py
@@ -380,4 +380,4 @@ def test_calculated_column_in_order_by(self):
"orderby": [["gender_cc", True]],
}
sql = table.get_query_str(query_obj)
assert "ORDER BY\n `gender_cc` ASC" in sql
assert "ORDER BY `gender_cc` ASC" in sql
9 changes: 6 additions & 3 deletions tests/integration_tests/query_context_tests.py
@@ -367,7 +367,7 @@ def test_query_response_type(self):
         sql_text = get_sql_text(payload)
 
         assert "SELECT" in sql_text
-        assert re.search(r'NOT [`"\[]?num[`"\]]? IS NULL', sql_text)
+        assert re.search(r'[`"\[]?num[`"\]]? IS NOT NULL', sql_text)
         assert re.search(
             r"""NOT \([\s\n]*[`"\[]?name[`"\]]? IS NULL[\s\n]* """
             r"""OR [`"\[]?name[`"\]]? IN \('"abc"'\)[\s\n]*\)""",
@@ -1161,16 +1161,19 @@ def test_time_offset_with_temporal_range_filter(app_context, physical_dataset):
 OFFSET 0
 """
     assert (
-        re.search(r"WHERE\n col6 >= .*2002-01-01", sqls[0])
+        re.search(r"WHERE col6 >= .*2002-01-01", sqls[0])
         and re.search(r"AND col6 < .*2003-01-01", sqls[0])
     ) is not None
     assert (
-        re.search(r"WHERE\n col6 >= .*2001-10-01", sqls[1])
+        re.search(r"WHERE col6 >= .*2001-10-01", sqls[1])
         and re.search(r"AND col6 < .*2002-10-01", sqls[1])
     ) is not None
 
 
 def test_virtual_dataset_with_comments(app_context, virtual_dataset_with_comments):
+    if backend() == "mysql":
+        return
+
     qc = QueryContextFactory().create(
         datasource={
             "type": virtual_dataset_with_comments.type,
@@ -268,7 +268,7 @@ def test_rls_filter_alters_gamma_birth_names_query(self):
         # establish that the filters are grouped together correctly with
         # ANDs, ORs and parens in the correct place
         assert (
-            "WHERE\n (\n (\n name LIKE 'A%' OR name LIKE 'B%'\n ) OR (\n name LIKE 'Q%'\n )\n )\n AND (\n gender = 'boy'\n )"
+            "WHERE ((name like 'A%' or name like 'B%') OR (name like 'Q%')) AND (gender = 'boy');"
             in sql
         )
 
2 changes: 1 addition & 1 deletion tests/integration_tests/sqla_models_tests.py
@@ -803,7 +803,7 @@ def test_none_operand_in_filter(login_as_admin, physical_dataset):
         {
             "operator": FilterOperator.NOT_EQUALS.value,
             "count": 0,
-            "sql_should_contain": "NOT COL4 IS NULL",
+            "sql_should_contain": "COL4 IS NOT NULL",
         },
     ]
     for expected in expected_results:
39 changes: 10 additions & 29 deletions tests/unit_tests/jinja_context_test.py
@@ -467,48 +467,29 @@ def test_dataset_macro(mocker: MockerFixture) -> None:
         return_value=[],
     )
 
+    space = " "
+
     assert (
         dataset_macro(1)
-        == """(
-SELECT
-  ds AS ds,
-  num_boys AS num_boys,
-  revenue AS revenue,
-  expenses AS expenses,
-  revenue - expenses AS profit
+        == f"""(
+SELECT ds AS ds, num_boys AS num_boys, revenue AS revenue, expenses AS expenses, revenue-expenses AS profit{space}
 FROM my_schema.old_dataset
 ) AS dataset_1"""
     )
 
     assert (
         dataset_macro(1, include_metrics=True)
-        == """(
-SELECT
-  ds AS ds,
-  num_boys AS num_boys,
-  revenue AS revenue,
-  expenses AS expenses,
-  revenue - expenses AS profit,
-  COUNT(*) AS cnt
-FROM my_schema.old_dataset
-GROUP BY
-  ds,
-  num_boys,
-  revenue,
-  expenses,
-  revenue - expenses
+        == f"""(
+SELECT ds AS ds, num_boys AS num_boys, revenue AS revenue, expenses AS expenses, revenue-expenses AS profit, COUNT(*) AS cnt{space}
+FROM my_schema.old_dataset GROUP BY ds, num_boys, revenue, expenses, revenue-expenses
 ) AS dataset_1"""
     )
 
     assert (
         dataset_macro(1, include_metrics=True, columns=["ds"])
-        == """(
-SELECT
-  ds AS ds,
-  COUNT(*) AS cnt
-FROM my_schema.old_dataset
-GROUP BY
-  ds
+        == f"""(
+SELECT ds AS ds, COUNT(*) AS cnt{space}
+FROM my_schema.old_dataset GROUP BY ds
 ) AS dataset_1"""
     )
 
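A note on the space = " " helper added in the hunk above: the single-line SELECT that the unformatted path now emits appears to end with a trailing space, and interpolating it through an f-string placeholder keeps editors and pre-commit hooks from silently stripping it out of the expected string. That reading is inferred from the diff rather than stated in the commit; a minimal standalone illustration of the pattern, reusing the names from the test above:

# Trailing whitespace in a string literal is easy to lose to auto-formatters;
# an explicit placeholder keeps it visible and intact.
space = " "
expected = f"""(
SELECT ds AS ds, COUNT(*) AS cnt{space}
FROM my_schema.old_dataset GROUP BY ds
) AS dataset_1"""

# The SELECT line of the expected snippet really does end with a space.
assert expected.splitlines()[1].endswith("cnt ")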
