fix: failed samples should throw exception (#20228)
zhaoyongjie authored Jun 1, 2022
1 parent 9432c62 commit 1530c34
Showing 5 changed files with 85 additions and 12 deletions.
@@ -17,7 +17,7 @@
* under the License.
*/
import React, { useState, useEffect, useMemo } from 'react';
import { GenericDataType, styled, t } from '@superset-ui/core';
import { ensureIsArray, GenericDataType, styled, t } from '@superset-ui/core';
import Loading from 'src/components/Loading';
import { EmptyStateMedium } from 'src/components/EmptyState';
import TableView, { EmptyWrapperType } from 'src/components/TableView';
@@ -63,16 +63,19 @@ export const SamplesPane = ({
setIsLoading(true);
getDatasetSamples(datasource.id, queryForce)
.then(response => {
setData(response.data);
setColnames(response.colnames);
setColtypes(response.coltypes);
setData(ensureIsArray(response.data));
setColnames(ensureIsArray(response.colnames));
setColtypes(ensureIsArray(response.coltypes));
setResponseError('');
cache.add(datasource);
if (queryForce && actions) {
actions.setForceQuery(false);
}
})
.catch(error => {
setData([]);
setColnames([]);
setColtypes([]);
setResponseError(`${error.name}: ${error.message}`);
})
.finally(() => {
15 changes: 15 additions & 0 deletions superset/common/utils/query_cache_manager.py
@@ -187,3 +187,18 @@ def set(
"""
if key:
set_and_log_cache(_cache[region], key, value, timeout, datasource_uid)

@staticmethod
def delete(
key: Optional[str],
region: CacheRegion = CacheRegion.DEFAULT,
) -> None:
if key:
_cache[region].delete(key)

@staticmethod
def has(
key: Optional[str],
region: CacheRegion = CacheRegion.DEFAULT,
) -> bool:
return bool(_cache[region].get(key)) if key else False
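For orientation, here is a minimal, self-contained sketch of the delete/has pattern these static methods add. The dict-per-region store and the local CacheRegion enum below are illustrative stand-ins for Superset's configured cache backends, not the real implementation:

# Illustrative stand-in: the real QueryCacheManager wraps Superset's cache
# backends keyed by CacheRegion, not a plain in-memory dict.
from enum import Enum
from typing import Optional

class CacheRegion(str, Enum):
    DEFAULT = "default"
    DATA = "data"

_cache = {region: {} for region in CacheRegion}  # hypothetical in-memory regions

class QueryCacheManager:
    @staticmethod
    def delete(key: Optional[str], region: CacheRegion = CacheRegion.DEFAULT) -> None:
        # No-op when the key is falsy, mirroring the guard in the real method.
        if key:
            _cache[region].pop(key, None)

    @staticmethod
    def has(key: Optional[str], region: CacheRegion = CacheRegion.DEFAULT) -> bool:
        # Treat an empty or absent cached value as "not cached".
        return bool(_cache[region].get(key)) if key else False

# Usage mirroring the samples command and the new integration tests:
_cache[CacheRegion.DATA]["abc"] = {"status": "failed"}
assert QueryCacheManager.has("abc", region=CacheRegion.DATA)
QueryCacheManager.delete("abc", region=CacheRegion.DATA)
assert not QueryCacheManager.has("abc", region=CacheRegion.DATA)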
8 changes: 1 addition & 7 deletions superset/datasets/api.py
@@ -825,10 +825,4 @@ def samples(self, pk: int) -> Response:
except DatasetForbiddenError:
return self.response_403()
except DatasetSamplesFailedError as ex:
logger.error(
"Error get dataset samples %s: %s",
self.__class__.__name__,
str(ex),
exc_info=True,
)
return self.response_422(message=str(ex))
return self.response_400(message=str(ex))
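With this change, a failed sample query surfaces as a 400 Bad Request whose body carries the underlying error message, instead of a 422 plus redundant server-side logging. A hedged client-side sketch of what a caller now sees; the host, dataset id 42, and bearer token are placeholders, only the endpoint path and the "message"/"result" fields come from the diff:

# Illustrative only: assumes a local Superset instance and a valid access token.
import requests

resp = requests.get(
    "http://localhost:8088/api/v1/dataset/42/samples",
    headers={"Authorization": "Bearer <access-token>"},
)
if resp.status_code == 400:
    # e.g. a broken calculated column now propagates the database error here
    print("sample query failed:", resp.json().get("message"))
else:
    resp.raise_for_status()
    print(resp.json()["result"]["data"])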
11 changes: 10 additions & 1 deletion superset/datasets/commands/samples.py
@@ -22,14 +22,17 @@
from superset.commands.base import BaseCommand
from superset.common.chart_data import ChartDataResultType
from superset.common.query_context_factory import QueryContextFactory
from superset.common.utils.query_cache_manager import QueryCacheManager
from superset.connectors.sqla.models import SqlaTable
from superset.constants import CacheRegion
from superset.datasets.commands.exceptions import (
DatasetForbiddenError,
DatasetNotFoundError,
DatasetSamplesFailedError,
)
from superset.datasets.dao import DatasetDAO
from superset.exceptions import SupersetSecurityException
from superset.utils.core import QueryStatus
from superset.views.base import check_ownership

logger = logging.getLogger(__name__)
@@ -58,7 +61,13 @@ def run(self) -> Dict[str, Any]:
)
results = qc_instance.get_payload()
try:
return results["queries"][0]
sample_data = results["queries"][0]
error_msg = sample_data.get("error")
if sample_data.get("status") == QueryStatus.FAILED and error_msg:
cache_key = sample_data.get("cache_key")
QueryCacheManager.delete(cache_key, region=CacheRegion.DATA)
raise DatasetSamplesFailedError(error_msg)
return sample_data
except (IndexError, KeyError) as exc:
raise DatasetSamplesFailedError from exc
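Putting the new guard together: a condensed, hedged sketch of the failure path in run(), with local stand-ins for the Superset imports added above (QueryStatus, QueryCacheManager, CacheRegion, DatasetSamplesFailedError); the helper name extract_sample_data and the injected cache_delete callable are illustrative only:

# Illustrative sketch, not the real command class.
class DatasetSamplesFailedError(Exception):
    pass

class QueryStatus:  # stand-in; Superset's QueryStatus.FAILED is the string "failed"
    FAILED = "failed"

def extract_sample_data(results: dict, cache_delete) -> dict:
    """Return the first query payload, or raise if the sample query failed."""
    try:
        sample_data = results["queries"][0]
    except (IndexError, KeyError) as exc:
        raise DatasetSamplesFailedError from exc

    error_msg = sample_data.get("error")
    if sample_data.get("status") == QueryStatus.FAILED and error_msg:
        # Evict the cached failed payload so it is not served on the next request,
        # then surface the database error (the API layer turns it into a 400).
        cache_delete(sample_data.get("cache_key"))
        raise DatasetSamplesFailedError(error_msg)
    return sample_data

# A failed payload now raises instead of being returned (and cached) silently.
failed = {"queries": [{"status": "failed", "error": "INCORRECT SQL", "cache_key": "k1"}]}
try:
    extract_sample_data(failed, cache_delete=lambda key: None)
except DatasetSamplesFailedError as err:
    print(err)  # -> INCORRECT SQL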

52 changes: 52 additions & 0 deletions tests/integration_tests/datasets/api_tests.py
@@ -27,7 +27,9 @@
import yaml
from sqlalchemy.sql import func

from superset.common.utils.query_cache_manager import QueryCacheManager
from superset.connectors.sqla.models import SqlaTable, SqlMetric, TableColumn
from superset.constants import CacheRegion
from superset.dao.exceptions import (
DAOCreateFailedError,
DAODeleteFailedError,
@@ -1883,6 +1885,8 @@ def test_get_dataset_samples(self):
assert rv.status_code == 200
assert "result" in rv_data
assert rv_data["result"]["cached_dttm"] is not None
cache_key1 = rv_data["result"]["cache_key"]
assert QueryCacheManager.has(cache_key1, region=CacheRegion.DATA)

# 2. should through cache
uri2 = f"api/v1/dataset/{dataset.id}/samples?force=true"
@@ -1892,6 +1896,8 @@
rv2 = self.client.get(uri2)
rv_data2 = json.loads(rv2.data)
assert rv_data2["result"]["cached_dttm"] is None
cache_key2 = rv_data2["result"]["cache_key"]
assert QueryCacheManager.has(cache_key2, region=CacheRegion.DATA)

# 3. data precision
assert "colnames" in rv_data2["result"]
@@ -1903,3 +1909,49 @@
f' limit {self.app.config["SAMPLES_ROW_LIMIT"]}'
).to_dict(orient="records")
assert eager_samples == rv_data2["result"]["data"]

@pytest.mark.usefixtures("create_datasets")
def test_get_dataset_samples_with_failed_cc(self):
dataset = self.get_fixture_datasets()[0]

self.login(username="admin")
failed_column = TableColumn(
column_name="DUMMY CC",
type="VARCHAR(255)",
table=dataset,
expression="INCORRECT SQL",
)
uri = f"api/v1/dataset/{dataset.id}/samples"
dataset.columns.append(failed_column)
rv = self.client.get(uri)
assert rv.status_code == 400
rv_data = json.loads(rv.data)
assert "message" in rv_data
if dataset.database.db_engine_spec.engine_name == "PostgreSQL":
assert "INCORRECT SQL" in rv_data.get("message")

def test_get_dataset_samples_on_virtual_dataset(self):
virtual_dataset = SqlaTable(
table_name="virtual_dataset",
sql=("SELECT 'foo' as foo, 'bar' as bar"),
database=get_example_database(),
)
TableColumn(column_name="foo", type="VARCHAR(255)", table=virtual_dataset)
TableColumn(column_name="bar", type="VARCHAR(255)", table=virtual_dataset)
SqlMetric(metric_name="count", expression="count(*)", table=virtual_dataset)

self.login(username="admin")
uri = f"api/v1/dataset/{virtual_dataset.id}/samples"
rv = self.client.get(uri)
assert rv.status_code == 200
rv_data = json.loads(rv.data)
cache_key = rv_data["result"]["cache_key"]
assert QueryCacheManager.has(cache_key, region=CacheRegion.DATA)

# remove original column in dataset
virtual_dataset.sql = "SELECT 'foo' as foo"
rv = self.client.get(uri)
assert rv.status_code == 400

db.session.delete(virtual_dataset)
db.session.commit()
