Skip to content

Commit

Permalink
should pass all CI steps
Browse files Browse the repository at this point in the history
  • Loading branch information
zhaoyongjie committed Oct 15, 2022
1 parent 5ad4725 commit bba9277
Show file tree
Hide file tree
Showing 16 changed files with 183 additions and 121 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -49,10 +49,12 @@ export const genericTime: ControlPanelSectionConfig = hasGenericChartAxes
],
};

// Legacy time section: exposes the SQL time-grain picker and a time-range
// control, layered on top of the shared base time section.
export const legacyRegularTime: ControlPanelSectionConfig = {
...baseTimeSection,
controlSetRows: [['granularity_sqla'], ['time_range']],
};
// When generic chart axes are enabled the legacy time section is unused, so
// expose an empty control set; otherwise fall back to the legacy SQL
// time-grain and time-range controls on top of the shared base section.
export const legacyRegularTime: ControlPanelSectionConfig =
  !hasGenericChartAxes
    ? {
        ...baseTimeSection,
        controlSetRows: [['granularity_sqla'], ['time_range']],
      }
    : { controlSetRows: [] };

export const datasourceAndVizType: ControlPanelSectionConfig = {
label: t('Datasource & Chart Type'),
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,7 @@ export { default as getMetricLabel } from './getMetricLabel';
export { default as DatasourceKey } from './DatasourceKey';
export { default as normalizeOrderBy } from './normalizeOrderBy';
export { normalizeTimeColumn } from './normalizeTimeColumn';
export { getXAxis, isXAxisSet, hasGenericChartAxes } from './getXAxis';
export * from './getXAxis';

export * from './types/AnnotationLayer';
export * from './types/QueryFormData';
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@
*/
import { buildQueryContext } from '@superset-ui/core';
import * as queryModule from '../../src/query/normalizeTimeColumn';
import * as getXAxisModule from '../../src/query/getXAxis';

describe('buildQueryContext', () => {
it('should build datasource for table sources and apply defaults', () => {
Expand Down Expand Up @@ -98,6 +99,7 @@ describe('buildQueryContext', () => {
]),
);
});
// todo(Yongjie): move these test cases into buildQueryObject.test.ts
it('should remove undefined value in post_processing', () => {
const queryContext = buildQueryContext(
{
Expand All @@ -124,12 +126,9 @@ describe('buildQueryContext', () => {
]);
});
it('should call normalizeTimeColumn if GENERIC_CHART_AXES is enabled and has x_axis', () => {
// @ts-ignore
const spy = jest.spyOn(window, 'window', 'get').mockImplementation(() => ({
featureFlags: {
GENERIC_CHART_AXES: true,
},
}));
Object.defineProperty(getXAxisModule, 'hasGenericChartAxes', {
value: true,
});
const spyNormalizeTimeColumn = jest.spyOn(
queryModule,
'normalizeTimeColumn',
Expand All @@ -144,16 +143,12 @@ describe('buildQueryContext', () => {
() => [{}],
);
expect(spyNormalizeTimeColumn).toBeCalled();
spy.mockRestore();
spyNormalizeTimeColumn.mockRestore();
});
it("shouldn't call normalizeTimeColumn if GENERIC_CHART_AXES is disabled", () => {
// @ts-ignore
const spy = jest.spyOn(window, 'window', 'get').mockImplementation(() => ({
featureFlags: {
GENERIC_CHART_AXES: false,
},
}));
Object.defineProperty(getXAxisModule, 'hasGenericChartAxes', {
value: false,
});
const spyNormalizeTimeColumn = jest.spyOn(
queryModule,
'normalizeTimeColumn',
Expand All @@ -167,7 +162,43 @@ describe('buildQueryContext', () => {
() => [{}],
);
expect(spyNormalizeTimeColumn).not.toBeCalled();
spy.mockRestore();
spyNormalizeTimeColumn.mockRestore();
});
// Verifies that when GENERIC_CHART_AXES is on, the query's time_range
// overrides the value of the temporal (DATETIME_BETWEEN) filter while
// leaving non-temporal filters untouched.
it('should override time filter if GENERIC_CHART_AXES is enabled', () => {
  // Force the feature flag on for this test; the module re-reads it at
  // query-build time.
  Object.defineProperty(getXAxisModule, 'hasGenericChartAxes', {
    value: true,
  });

  const queryContext = buildQueryContext(
    {
      datasource: '5__table',
      viz_type: 'table',
    },
    () => [
      {
        filters: [
          {
            col: 'col1',
            op: 'DATETIME_BETWEEN',
            val: '2001 : 2002',
          },
          {
            col: 'col2',
            op: 'IN',
            val: ['a', 'b'],
          },
        ],
        time_range: '1990 : 1991',
      },
    ],
  );
  // col1's temporal value is replaced by time_range; col2 passes through.
  expect(queryContext.queries[0].filters).toEqual([
    { col: 'col1', op: 'DATETIME_BETWEEN', val: '1990 : 1991' },
    {
      col: 'col2',
      op: 'IN',
      val: ['a', 'b'],
    },
  ]);
});
});
Original file line number Diff line number Diff line change
Expand Up @@ -18,54 +18,9 @@
*/
import { isXAxisSet } from '@superset-ui/core';

describe('GENERIC_CHART_AXES is enabled', () => {
let windowSpy: any;

beforeAll(() => {
// @ts-ignore
windowSpy = jest.spyOn(window, 'window', 'get').mockImplementation(() => ({
featureFlags: {
GENERIC_CHART_AXES: true,
},
}));
});

afterAll(() => {
windowSpy.mockRestore();
});

it('isEnabledAxies when FF is disabled', () => {
expect(
isXAxisSet({ datasource: '123', viz_type: 'table' }),
).not.toBeTruthy();
expect(
isXAxisSet({ datasource: '123', viz_type: 'table', x_axis: 'axis' }),
).toBeTruthy();
});
});

describe('GENERIC_CHART_AXES is disabled', () => {
let windowSpy: any;

beforeAll(() => {
// @ts-ignore
windowSpy = jest.spyOn(window, 'window', 'get').mockImplementation(() => ({
featureFlags: {
GENERIC_CHART_AXES: false,
},
}));
});

afterAll(() => {
windowSpy.mockRestore();
});

it('isEnabledAxies when FF is disabled', () => {
expect(
isXAxisSet({ datasource: '123', viz_type: 'table' }),
).not.toBeTruthy();
expect(
isXAxisSet({ datasource: '123', viz_type: 'table', x_axis: 'axis' }),
).toBeTruthy();
});
test('isXAxisSet', () => {
  // Form data without an x_axis field must not count as having one set;
  // the presence of x_axis alone flips the result.
  const formDataWithoutAxis = { datasource: '123', viz_type: 'table' };
  const formDataWithAxis = {
    datasource: '123',
    viz_type: 'table',
    x_axis: 'axis',
  };
  expect(isXAxisSet(formDataWithoutAxis)).toBeFalsy();
  expect(isXAxisSet(formDataWithAxis)).toBeTruthy();
});
24 changes: 15 additions & 9 deletions superset/common/query_context_processor.py
Original file line number Diff line number Diff line change
Expand Up @@ -35,6 +35,7 @@
from superset.common.query_actions import get_query_results
from superset.common.utils import dataframe_utils
from superset.common.utils.query_cache_manager import QueryCacheManager
from superset.common.utils.time_range_utils import get_since_until_from_query_object
from superset.connectors.base.models import BaseDatasource
from superset.constants import CacheRegion
from superset.exceptions import (
Expand All @@ -56,6 +57,7 @@
get_column_names_from_columns,
get_column_names_from_metrics,
get_metric_names,
get_xaxis_label,
normalize_dttm_col,
TIME_COMPARISON,
)
Expand Down Expand Up @@ -314,8 +316,14 @@ def processing_time_offsets( # pylint: disable=too-many-locals,too-many-stateme
rv_dfs: List[pd.DataFrame] = [df]

time_offsets = query_object.time_offsets
outer_from_dttm = query_object.from_dttm
outer_to_dttm = query_object.to_dttm
outer_from_dttm, outer_to_dttm = get_since_until_from_query_object(query_object)
if not outer_from_dttm or not outer_to_dttm:
raise QueryObjectValidationError(
_(
"An enclosed time range (both start and end) must be specified "
"when using a Time Comparison."
)
)
for offset in time_offsets:
try:
query_object_clone.from_dttm = get_past_or_future(
Expand All @@ -330,14 +338,12 @@ def processing_time_offsets( # pylint: disable=too-many-locals,too-many-stateme
query_object_clone.inner_to_dttm = outer_to_dttm
query_object_clone.time_offsets = []
query_object_clone.post_processing = []
query_object_clone.filter = [
flt
for flt in query_object_clone.filter
if flt.get("col") != get_xaxis_label(query_object.columns)
]

if not query_object.from_dttm or not query_object.to_dttm:
raise QueryObjectValidationError(
_(
"An enclosed time range (both start and end) must be specified "
"when using a Time Comparison."
)
)
# `offset` is added to the hash function
cache_key = self.query_cache_key(query_object_clone, time_offset=offset)
cache = QueryCacheManager.get(
Expand Down
8 changes: 4 additions & 4 deletions superset/common/query_object.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@

import json
import logging
from datetime import datetime, timedelta
from datetime import datetime
from pprint import pformat
from typing import Any, Dict, List, NamedTuple, Optional, TYPE_CHECKING

Expand All @@ -46,7 +46,6 @@
json_int_dttm_ser,
QueryObjectFilterClause,
)
from superset.utils.date_parser import parse_human_timedelta
from superset.utils.hashing import md5_sha_from_dict

if TYPE_CHECKING:
Expand Down Expand Up @@ -106,7 +105,7 @@ class QueryObject: # pylint: disable=too-many-instance-attributes
series_limit: int
series_limit_metric: Optional[Metric]
time_offsets: List[str]
time_shift: Optional[timedelta]
time_shift: Optional[str]
time_range: Optional[str]
to_dttm: Optional[datetime]

Expand Down Expand Up @@ -156,7 +155,7 @@ def __init__( # pylint: disable=too-many-locals
self.series_limit = series_limit
self.series_limit_metric = series_limit_metric
self.time_range = time_range
self.time_shift = parse_human_timedelta(time_shift)
self.time_shift = time_shift
self.from_dttm = kwargs.get("from_dttm")
self.to_dttm = kwargs.get("to_dttm")
self.result_type = kwargs.get("result_type")
Expand Down Expand Up @@ -336,6 +335,7 @@ def to_dict(self) -> Dict[str, Any]:
"series_limit": self.series_limit,
"series_limit_metric": self.series_limit_metric,
"to_dttm": self.to_dttm,
"time_shift": self.time_shift,
}
return query_object_dict

Expand Down
27 changes: 5 additions & 22 deletions superset/common/query_object_factory.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,14 +16,12 @@
# under the License.
from __future__ import annotations

from datetime import datetime
from typing import Any, Dict, Optional, Tuple, TYPE_CHECKING
from typing import Any, Dict, Optional, TYPE_CHECKING

from superset import app
from superset.common.chart_data import ChartDataResultType
from superset.common.query_object import QueryObject
from superset.common.utils.time_range_utils import get_since_until_from_time_range
from superset.utils.core import apply_max_row_limit, DatasourceDict, DatasourceType
from superset.utils.date_parser import get_since_until

if TYPE_CHECKING:
from sqlalchemy.orm import sessionmaker
Expand Down Expand Up @@ -63,7 +61,9 @@ def create( # pylint: disable=too-many-arguments
processed_extras = self._process_extras(extras)
result_type = kwargs.setdefault("result_type", parent_result_type)
row_limit = self._process_row_limit(row_limit, result_type)
from_dttm, to_dttm = self._get_dttms(time_range, time_shift, processed_extras)
from_dttm, to_dttm = get_since_until_from_time_range(
time_range, time_shift, processed_extras
)
kwargs["from_dttm"] = from_dttm
kwargs["to_dttm"] = to_dttm
return QueryObject(
Expand Down Expand Up @@ -99,23 +99,6 @@ def _process_row_limit(
)
return apply_max_row_limit(row_limit or default_row_limit)

@staticmethod
def _get_dttms(
    time_range: Optional[str],
    time_shift: Optional[str] = None,
    extras: Optional[Dict[str, Any]] = None,
) -> Tuple[Optional[datetime], Optional[datetime]]:
    """Resolve a (possibly relative) time range into concrete datetimes.

    Defaults for the relative start/end anchors come from the application
    config when the ``extras`` dict does not supply them. Either bound may
    be ``None`` when the range is open-ended.
    """
    return get_since_until(
        relative_start=(extras or {}).get(
            "relative_start", app.config["DEFAULT_RELATIVE_START_TIME"]
        ),
        relative_end=(extras or {}).get(
            "relative_end", app.config["DEFAULT_RELATIVE_END_TIME"]
        ),
        time_range=time_range,
        time_shift=time_shift,
    )

# light version of the view.utils.core
# import view.utils require application context
# Todo: move it and the view.utils.core to utils package
Loading

0 comments on commit bba9277

Please sign in to comment.