diff --git a/CHANGELOG.md b/CHANGELOG.md index 34657f0ba60b0..9aa311f32a667 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -17,6 +17,16 @@ specific language governing permissions and limitations under the License. --> ## Change Log + +### 1.4.2 (Sat Mar 19 00:08:06 2022 +0200) +**Features** +- [#19248](https://github.com/apache/superset/pull/19248) feat: add support for comments in adhoc clauses (@villebro) +- [#18214](https://github.com/apache/superset/pull/18214) feat(docker-compose): add TAG option (@villebro) + +**Fixes** +- [#17641](https://github.com/apache/superset/pull/17641) fix(sqla): make text clause escaping optional (@villebro) +- [#18566](https://github.com/apache/superset/pull/18566) fix(plugin-chart-echarts): area chart opacity bug (@villebro) + ### 1.4.1 **Database Migrations** diff --git a/docs/docs/contributing/contributing-page.mdx b/docs/docs/contributing/contributing-page.mdx index d6cdd2cecfd3a..f4f3cd6400fb6 100644 --- a/docs/docs/contributing/contributing-page.mdx +++ b/docs/docs/contributing/contributing-page.mdx @@ -8,8 +8,8 @@ version: 1 ## Contributing to Superset Superset is an [Apache Software foundation](https://www.apache.org/theapacheway/index.html) project. 
-The core contributors (or committers) to Superset communicate primarily in the following channels (all of -which you can join): +The core contributors (or committers) to Superset communicate primarily in the following channels ( +which can be joined by anyone): - [Mailing list](https://lists.apache.org/list.html?dev@superset.apache.org) - [Apache Superset Slack community](https://join.slack.com/t/apache-superset/shared_invite/zt-16jvzmoi8-sI7jKWp~xc2zYRe~NqiY9Q) diff --git a/scripts/benchmark_migration.py b/scripts/benchmark_migration.py index 27670b5d4d729..baae8befec1bc 100644 --- a/scripts/benchmark_migration.py +++ b/scripts/benchmark_migration.py @@ -102,7 +102,10 @@ def find_models(module: ModuleType) -> List[Type[Model]]: while tables: table = tables.pop() seen.add(table) - model = getattr(Base.classes, table) + try: + model = getattr(Base.classes, table) + except AttributeError: + continue model.__tablename__ = table models.append(model) diff --git a/setup.py b/setup.py index 81068e1f860da..c7cdd2acd2185 100644 --- a/setup.py +++ b/setup.py @@ -23,8 +23,8 @@ from setuptools import find_packages, setup BASE_DIR = os.path.abspath(os.path.dirname(__file__)) - PACKAGE_JSON = os.path.join(BASE_DIR, "superset-frontend", "package.json") + with open(PACKAGE_JSON, "r") as package_file: version_string = json.load(package_file)["version"] @@ -111,6 +111,7 @@ def get_git_sha() -> str: "slackclient==2.5.0", # PINNED! slack changes file upload api in the future versions "sqlalchemy>=1.3.16, <1.4, !=1.3.21", "sqlalchemy-utils>=0.37.8, <0.38", + "sqloxide==0.1.15", "sqlparse==0.3.0", # PINNED! 
see https://github.com/andialbrecht/sqlparse/issues/562 "tabulate==0.8.9", # needed to support Literal (3.8) and TypeGuard (3.10) diff --git a/superset-embedded-sdk/package.json b/superset-embedded-sdk/package.json index 88642e72327f3..49debb4ad321f 100644 --- a/superset-embedded-sdk/package.json +++ b/superset-embedded-sdk/package.json @@ -1,6 +1,6 @@ { "name": "@superset-ui/embedded-sdk", - "version": "0.1.0-alpha.6", + "version": "0.1.0-alpha.7", "description": "SDK for embedding resources from Superset into your own application", "access": "public", "keywords": [ diff --git a/superset-embedded-sdk/src/index.ts b/superset-embedded-sdk/src/index.ts index 34932bd6250d7..32b02641e00d2 100644 --- a/superset-embedded-sdk/src/index.ts +++ b/superset-embedded-sdk/src/index.ts @@ -131,7 +131,7 @@ export async function embedDashboard({ resolve(new Switchboard({ port: ourPort, name: 'superset-embedded-sdk', debug })); }); - iframe.src = `${supersetDomain}/dashboard/${id}/embedded${dashboardConfig}`; + iframe.src = `${supersetDomain}/embedded/${id}${dashboardConfig}`; mountPoint.replaceChildren(iframe); log('placed the iframe') }); diff --git a/superset-frontend/.eslintrc.js b/superset-frontend/.eslintrc.js index 8decc32ee7dc8..facd3431f4367 100644 --- a/superset-frontend/.eslintrc.js +++ b/superset-frontend/.eslintrc.js @@ -194,6 +194,7 @@ module.exports = { 'fixtures.*', 'cypress-base/cypress/**/*', 'Stories.tsx', + 'packages/superset-ui-core/src/style/index.tsx', ], rules: { 'theme-colors/no-literal-colors': 0, diff --git a/superset-frontend/src/dashboard/components/DashboardEmbedControls.tsx b/superset-frontend/src/dashboard/components/DashboardEmbedControls.tsx new file mode 100644 index 0000000000000..d20cc2460090b --- /dev/null +++ b/superset-frontend/src/dashboard/components/DashboardEmbedControls.tsx @@ -0,0 +1,228 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +import React, { useCallback, useEffect, useState } from 'react'; +import { makeApi, styled, SupersetApiError, t } from '@superset-ui/core'; +import { InfoTooltipWithTrigger } from '@superset-ui/chart-controls'; +import Modal from 'src/components/Modal'; +import Loading from 'src/components/Loading'; +import Button from 'src/components/Button'; +import { Input } from 'src/components/Input'; +import { useToasts } from 'src/components/MessageToasts/withToasts'; +import { FormItem } from 'src/components/Form'; +import { EmbeddedDashboard } from '../types'; + +type Props = { + dashboardId: string; + show: boolean; + onHide: () => void; +}; + +type EmbeddedApiPayload = { allowed_domains: string[] }; + +const stringToList = (stringyList: string): string[] => + stringyList.split(/(?:\s|,)+/).filter(x => x); + +const ButtonRow = styled.div` + display: flex; + flex-direction: row; + justify-content: flex-end; +`; + +export const DashboardEmbedControls = ({ dashboardId, onHide }: Props) => { + const { addInfoToast, addDangerToast } = useToasts(); + const [ready, setReady] = useState(true); // whether we have initialized yet + const [loading, setLoading] = useState(false); // whether we are currently doing an async thing + const [embedded, setEmbedded] = useState(null); // the 
embedded dashboard config + const [allowedDomains, setAllowedDomains] = useState(''); + + const endpoint = `/api/v1/dashboard/${dashboardId}/embedded`; + // whether saveable changes have been made to the config + const isDirty = + !embedded || + stringToList(allowedDomains).join() !== embedded.allowed_domains.join(); + + const enableEmbedded = useCallback(() => { + setLoading(true); + makeApi({ + method: 'POST', + endpoint, + })({ + allowed_domains: stringToList(allowedDomains), + }) + .then( + ({ result }) => { + setEmbedded(result); + setAllowedDomains(result.allowed_domains.join(', ')); + addInfoToast(t('Changes saved.')); + }, + err => { + console.error(err); + addDangerToast( + t( + t('Sorry, something went wrong. The changes could not be saved.'), + ), + ); + }, + ) + .finally(() => { + setLoading(false); + }); + }, [endpoint, allowedDomains]); + + const disableEmbedded = useCallback(() => { + Modal.confirm({ + title: t('Disable embedding?'), + content: t('This will remove your current embed configuration.'), + okType: 'danger', + onOk: () => { + setLoading(true); + makeApi<{}>({ method: 'DELETE', endpoint })({}) + .then( + () => { + setEmbedded(null); + setAllowedDomains(''); + addInfoToast(t('Embedding deactivated.')); + onHide(); + }, + err => { + console.error(err); + addDangerToast( + t( + 'Sorry, something went wrong. Embedding could not be deactivated.', + ), + ); + }, + ) + .finally(() => { + setLoading(false); + }); + }, + }); + }, [endpoint]); + + useEffect(() => { + setReady(false); + makeApi<{}, { result: EmbeddedDashboard }>({ + method: 'GET', + endpoint, + })({}) + .catch(err => { + if ((err as SupersetApiError).status === 404) { + // 404 just means the dashboard isn't currently embedded + return { result: null }; + } + throw err; + }) + .then(({ result }) => { + setReady(true); + setEmbedded(result); + setAllowedDomains(result ? 
result.allowed_domains.join(', ') : ''); + }); + }, [dashboardId]); + + if (!ready) { + return ; + } + + return ( + <> +

+ {embedded ? ( + <> + {t( + 'This dashboard is ready to embed. In your application, pass the following id to the SDK:', + )} +
+ {embedded.uuid} + + ) : ( + t( + 'Configure this dashboard to embed it into an external web application.', + ) + )} +

+

+ {t('For further instructions, consult the')}{' '} + + {t('Superset Embedded SDK documentation.')} + +

+

Settings

+ + + setAllowedDomains(event.target.value)} + /> + + + {embedded ? ( + <> + + + + ) : ( + + )} + + + ); +}; + +export const DashboardEmbedModal = (props: Props) => { + const { show, onHide } = props; + + return ( + + + + ); +}; diff --git a/superset-frontend/src/dashboard/components/Header/HeaderActionsDropdown/index.jsx b/superset-frontend/src/dashboard/components/Header/HeaderActionsDropdown/index.jsx index 9375c684af90a..619e10ea22e80 100644 --- a/superset-frontend/src/dashboard/components/Header/HeaderActionsDropdown/index.jsx +++ b/superset-frontend/src/dashboard/components/Header/HeaderActionsDropdown/index.jsx @@ -59,11 +59,13 @@ const propTypes = { userCanEdit: PropTypes.bool.isRequired, userCanShare: PropTypes.bool.isRequired, userCanSave: PropTypes.bool.isRequired, + userCanCurate: PropTypes.bool.isRequired, isLoading: PropTypes.bool.isRequired, layout: PropTypes.object.isRequired, expandedSlices: PropTypes.object.isRequired, onSave: PropTypes.func.isRequired, showPropertiesModal: PropTypes.func.isRequired, + manageEmbedded: PropTypes.func.isRequired, refreshLimit: PropTypes.number, refreshWarning: PropTypes.string, lastModifiedTime: PropTypes.number.isRequired, @@ -88,6 +90,7 @@ const MENU_KEYS = { EDIT_CSS: 'edit-css', DOWNLOAD_AS_IMAGE: 'download-as-image', TOGGLE_FULLSCREEN: 'toggle-fullscreen', + MANAGE_EMBEDDED: 'manage-embedded', }; const DropdownButton = styled.div` @@ -182,6 +185,10 @@ class HeaderActionsDropdown extends React.PureComponent { window.location.replace(url); break; } + case MENU_KEYS.MANAGE_EMBEDDED: { + this.props.manageEmbedded(); + break; + } default: break; } @@ -204,6 +211,7 @@ class HeaderActionsDropdown extends React.PureComponent { userCanEdit, userCanShare, userCanSave, + userCanCurate, isLoading, refreshLimit, refreshWarning, @@ -313,6 +321,12 @@ class HeaderActionsDropdown extends React.PureComponent { )} + {!editMode && userCanCurate && ( + + {t('Embed dashboard')} + + )} + {!editMode && ( {t('Download as image')} diff 
--git a/superset-frontend/src/dashboard/components/Header/index.jsx b/superset-frontend/src/dashboard/components/Header/index.jsx index 7fd1afc82eb3b..ad668daf79a53 100644 --- a/superset-frontend/src/dashboard/components/Header/index.jsx +++ b/superset-frontend/src/dashboard/components/Header/index.jsx @@ -52,7 +52,9 @@ import setPeriodicRunner, { stopPeriodicRender, } from 'src/dashboard/util/setPeriodicRunner'; import { options as PeriodicRefreshOptions } from 'src/dashboard/components/RefreshIntervalModal'; +import findPermission from 'src/dashboard/util/findPermission'; import { FILTER_BOX_MIGRATION_STATES } from 'src/explore/constants'; +import { DashboardEmbedModal } from '../DashboardEmbedControls'; const propTypes = { addSuccessToast: PropTypes.func.isRequired, @@ -420,6 +422,14 @@ class Header extends React.PureComponent { this.setState({ showingReportModal: false }); } + showEmbedModal = () => { + this.setState({ showingEmbedModal: true }); + }; + + hideEmbedModal = () => { + this.setState({ showingEmbedModal: false }); + }; + renderReportModal() { const attachedReportExists = !!Object.keys(this.props.reports).length; return attachedReportExists ? 
( @@ -498,6 +508,9 @@ class Header extends React.PureComponent { const userCanSaveAs = dashboardInfo.dash_save_perm && filterboxMigrationState !== FILTER_BOX_MIGRATION_STATES.REVIEWING; + const userCanCurate = + isFeatureEnabled(FeatureFlag.EMBEDDED_SUPERSET) && + findPermission('can_set_embedded', 'Dashboard', user.roles); const shouldShowReport = !editMode && this.canAddReports(); const refreshLimit = dashboardInfo.common?.conf?.SUPERSET_DASHBOARD_PERIODICAL_REFRESH_LIMIT; @@ -658,6 +671,14 @@ class Header extends React.PureComponent { /> )} + {userCanCurate && ( + + )} + { +type PageProps = { + idOrSlug: string; +}; + +export const DashboardPage: FC = ({ idOrSlug }: PageProps) => { const dispatch = useDispatch(); const theme = useTheme(); const user = useSelector( state => state.user, ); const { addDangerToast } = useToasts(); - const { idOrSlug } = useParams<{ idOrSlug: string }>(); const { result: dashboard, error: dashboardApiError } = useDashboard(idOrSlug); const { result: charts, error: chartsApiError } = diff --git a/superset-frontend/src/dashboard/containers/DashboardRoute.tsx b/superset-frontend/src/dashboard/containers/DashboardRoute.tsx new file mode 100644 index 0000000000000..a382a28d4586f --- /dev/null +++ b/superset-frontend/src/dashboard/containers/DashboardRoute.tsx @@ -0,0 +1,28 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +import React, { FC } from 'react'; +import { useParams } from 'react-router-dom'; +import { DashboardPage } from './DashboardPage'; + +const DashboardRoute: FC = () => { + const { idOrSlug } = useParams<{ idOrSlug: string }>(); + return ; +}; + +export default DashboardRoute; diff --git a/superset-frontend/src/dashboard/types.ts b/superset-frontend/src/dashboard/types.ts index dffbd9fbe0be8..c0b312d434423 100644 --- a/superset-frontend/src/dashboard/types.ts +++ b/superset-frontend/src/dashboard/types.ts @@ -152,3 +152,9 @@ export type DashboardPermalinkValue = { hash: string; }; }; + +export type EmbeddedDashboard = { + uuid: string; + dashboard_id: string; + allowed_domains: string[]; +}; diff --git a/superset-frontend/src/embedded/index.tsx b/superset-frontend/src/embedded/index.tsx index 3cd89a88be225..afea2fd8bb94b 100644 --- a/superset-frontend/src/embedded/index.tsx +++ b/superset-frontend/src/embedded/index.tsx @@ -45,18 +45,22 @@ const LazyDashboardPage = lazy( ), ); +const EmbeddedRoute = () => ( + }> + + + + + + + +); + const EmbeddedApp = () => ( - - }> - - - - - - - - + {/* todo (embedded) remove this line after uuids are deployed */} + + ); @@ -64,9 +68,9 @@ const appMountPoint = document.getElementById('app')!; const MESSAGE_TYPE = '__embedded_comms__'; -if (!window.parent) { +if (!window.parent || window.parent === window) { appMountPoint.innerHTML = - 'This page is intended to be embedded in an iframe, but no window.parent was found.'; + 'This page is intended to be embedded in an iframe, but it looks like that is not the 
case.'; } // if the page is embedded in an origin that hasn't diff --git a/superset-frontend/src/hooks/apiResources/dashboards.ts b/superset-frontend/src/hooks/apiResources/dashboards.ts index b5b59d4ef4ef0..9f512d5b15b2f 100644 --- a/superset-frontend/src/hooks/apiResources/dashboards.ts +++ b/superset-frontend/src/hooks/apiResources/dashboards.ts @@ -17,7 +17,7 @@ * under the License. */ -import { Dashboard, Datasource } from 'src/dashboard/types'; +import { Dashboard, Datasource, EmbeddedDashboard } from 'src/dashboard/types'; import { Chart } from 'src/types/Chart'; import { useApiV1Resource, useTransformedResource } from './apiResources'; @@ -42,3 +42,6 @@ export const useDashboardCharts = (idOrSlug: string | number) => // that are necessary for rendering the given dashboard export const useDashboardDatasets = (idOrSlug: string | number) => useApiV1Resource(`/api/v1/dashboard/${idOrSlug}/datasets`); + +export const useEmbeddedDashboard = (idOrSlug: string | number) => + useApiV1Resource(`/api/v1/dashboard/${idOrSlug}/embedded`); diff --git a/superset-frontend/src/preamble.ts b/superset-frontend/src/preamble.ts index 7bd6dbe29b365..8d89104bf2098 100644 --- a/superset-frontend/src/preamble.ts +++ b/superset-frontend/src/preamble.ts @@ -37,6 +37,9 @@ export let bootstrapData: { user?: User | undefined; common?: any; config?: any; + embedded?: { + dashboard_id: string; + }; } = {}; // Configure translation if (typeof window !== 'undefined') { diff --git a/superset-frontend/src/views/routes.tsx b/superset-frontend/src/views/routes.tsx index 255dc99da7796..525860ecef066 100644 --- a/superset-frontend/src/views/routes.tsx +++ b/superset-frontend/src/views/routes.tsx @@ -57,10 +57,10 @@ const DashboardList = lazy( /* webpackChunkName: "DashboardList" */ 'src/views/CRUD/dashboard/DashboardList' ), ); -const DashboardPage = lazy( +const DashboardRoute = lazy( () => import( - /* webpackChunkName: "DashboardPage" */ 'src/dashboard/containers/DashboardPage' + /* 
webpackChunkName: "DashboardRoute" */ 'src/dashboard/containers/DashboardRoute' ), ); const DatabaseList = lazy( @@ -112,7 +112,7 @@ export const routes: Routes = [ }, { path: '/superset/dashboard/:idOrSlug/', - Component: DashboardPage, + Component: DashboardRoute, }, { path: '/chart/list/', diff --git a/superset/common/query_context_processor.py b/superset/common/query_context_processor.py index 202088c853ed7..c87e878fddefe 100644 --- a/superset/common/query_context_processor.py +++ b/superset/common/query_context_processor.py @@ -471,9 +471,9 @@ def get_viz_annotation_data( annotation_layer: Dict[str, Any], force: bool ) -> Dict[str, Any]: chart = ChartDAO.find_by_id(annotation_layer["value"]) - form_data = chart.form_data.copy() if not chart: raise QueryObjectValidationError(_("The chart does not exist")) + form_data = chart.form_data.copy() try: viz_obj = get_viz( datasource_type=chart.datasource.type, diff --git a/superset/connectors/sqla/models.py b/superset/connectors/sqla/models.py index 3f6fee0043703..5b89919d0e8d6 100644 --- a/superset/connectors/sqla/models.py +++ b/superset/connectors/sqla/models.py @@ -78,6 +78,7 @@ from superset.connectors.sqla.utils import ( get_physical_table_metadata, get_virtual_table_metadata, + load_or_create_tables, validate_adhoc_subquery, ) from superset.datasets.models import Dataset as NewDataset @@ -2242,7 +2243,10 @@ def write_shadow_dataset( # pylint: disable=too-many-locals if column.is_active is False: continue - extra_json = json.loads(column.extra or "{}") + try: + extra_json = json.loads(column.extra or "{}") + except json.decoder.JSONDecodeError: + extra_json = {} for attr in {"groupby", "filterable", "verbose_name", "python_date_format"}: value = getattr(column, attr) if value: @@ -2269,7 +2273,10 @@ def write_shadow_dataset( # pylint: disable=too-many-locals # create metrics for metric in dataset.metrics: - extra_json = json.loads(metric.extra or "{}") + try: + extra_json = json.loads(metric.extra or "{}") + 
except json.decoder.JSONDecodeError: + extra_json = {} for attr in {"verbose_name", "metric_type", "d3format"}: value = getattr(metric, attr) if value: @@ -2300,8 +2307,7 @@ def write_shadow_dataset( # pylint: disable=too-many-locals ) # physical dataset - tables = [] - if dataset.sql is None: + if not dataset.sql: physical_columns = [column for column in columns if column.is_physical] # create table @@ -2314,7 +2320,7 @@ def write_shadow_dataset( # pylint: disable=too-many-locals is_managed_externally=dataset.is_managed_externally, external_url=dataset.external_url, ) - tables.append(table) + tables = [table] # virtual dataset else: @@ -2325,18 +2331,14 @@ def write_shadow_dataset( # pylint: disable=too-many-locals # find referenced tables parsed = ParsedQuery(dataset.sql) referenced_tables = parsed.tables - - # predicate for finding the referenced tables - predicate = or_( - *[ - and_( - NewTable.schema == (table.schema or dataset.schema), - NewTable.name == table.table, - ) - for table in referenced_tables - ] + tables = load_or_create_tables( + session, + dataset.database_id, + dataset.schema, + referenced_tables, + conditional_quote, + engine, ) - tables = session.query(NewTable).filter(predicate).all() # create the new dataset new_dataset = NewDataset( @@ -2345,7 +2347,7 @@ def write_shadow_dataset( # pylint: disable=too-many-locals expression=dataset.sql or conditional_quote(dataset.table_name), tables=tables, columns=columns, - is_physical=dataset.sql is None, + is_physical=not dataset.sql, is_managed_externally=dataset.is_managed_externally, external_url=dataset.external_url, ) diff --git a/superset/connectors/sqla/utils.py b/superset/connectors/sqla/utils.py index 389c5b9012a3b..4fc11a4d1d16b 100644 --- a/superset/connectors/sqla/utils.py +++ b/superset/connectors/sqla/utils.py @@ -15,13 +15,17 @@ # specific language governing permissions and limitations # under the License. 
from contextlib import closing -from typing import Dict, List, Optional, TYPE_CHECKING +from typing import Callable, Dict, List, Optional, Set, TYPE_CHECKING import sqlparse from flask_babel import lazy_gettext as _ +from sqlalchemy import and_, inspect, or_ +from sqlalchemy.engine import Engine from sqlalchemy.exc import NoSuchTableError +from sqlalchemy.orm import Session from sqlalchemy.sql.type_api import TypeEngine +from superset.columns.models import Column as NewColumn from superset.errors import ErrorLevel, SupersetError, SupersetErrorType from superset.exceptions import ( SupersetGenericDBErrorException, @@ -29,12 +33,16 @@ ) from superset.models.core import Database from superset.result_set import SupersetResultSet -from superset.sql_parse import has_table_query, ParsedQuery +from superset.sql_parse import has_table_query, ParsedQuery, Table +from superset.tables.models import Table as NewTable if TYPE_CHECKING: from superset.connectors.sqla.models import SqlaTable +TEMPORAL_TYPES = {"DATETIME", "DATE", "TIME", "TIMEDELTA"} + + def get_physical_table_metadata( database: Database, table_name: str, @@ -151,3 +159,78 @@ def validate_adhoc_subquery(raw_sql: str) -> None: ) ) return + + +def load_or_create_tables( # pylint: disable=too-many-arguments + session: Session, + database_id: int, + default_schema: Optional[str], + tables: Set[Table], + conditional_quote: Callable[[str], str], + engine: Engine, +) -> List[NewTable]: + """ + Load or create new table model instances. 
+ """ + if not tables: + return [] + + # set the default schema in tables that don't have it + if default_schema: + fixed_tables = list(tables) + for i, table in enumerate(fixed_tables): + if table.schema is None: + fixed_tables[i] = Table(table.table, default_schema, table.catalog) + tables = set(fixed_tables) + + # load existing tables + predicate = or_( + *[ + and_( + NewTable.database_id == database_id, + NewTable.schema == table.schema, + NewTable.name == table.table, + ) + for table in tables + ] + ) + new_tables = session.query(NewTable).filter(predicate).all() + + # add missing tables + existing = {(table.schema, table.name) for table in new_tables} + for table in tables: + if (table.schema, table.table) not in existing: + try: + inspector = inspect(engine) + column_metadata = inspector.get_columns( + table.table, schema=table.schema + ) + except Exception: # pylint: disable=broad-except + continue + columns = [ + NewColumn( + name=column["name"], + type=str(column["type"]), + expression=conditional_quote(column["name"]), + is_temporal=column["type"].python_type.__name__.upper() + in TEMPORAL_TYPES, + is_aggregation=False, + is_physical=True, + is_spatial=False, + is_partition=False, + is_increase_desired=True, + ) + for column in column_metadata + ] + new_tables.append( + NewTable( + name=table.table, + schema=table.schema, + catalog=None, + database_id=database_id, + columns=columns, + ) + ) + existing.add((table.schema, table.table)) + + return new_tables diff --git a/superset/dao/base.py b/superset/dao/base.py index ebd6a890886a8..607967e3041e2 100644 --- a/superset/dao/base.py +++ b/superset/dao/base.py @@ -15,12 +15,12 @@ # specific language governing permissions and limitations # under the License. 
# pylint: disable=isinstance-second-argument-not-valid-type -from typing import Any, Dict, List, Optional, Type +from typing import Any, Dict, List, Optional, Type, Union from flask_appbuilder.models.filters import BaseFilter from flask_appbuilder.models.sqla import Model from flask_appbuilder.models.sqla.interface import SQLAInterface -from sqlalchemy.exc import SQLAlchemyError +from sqlalchemy.exc import SQLAlchemyError, StatementError from sqlalchemy.orm import Session from superset.dao.exceptions import ( @@ -46,9 +46,12 @@ class BaseDAO: """ Child classes can register base filtering to be aplied to all filter methods """ + id_column_name = "id" @classmethod - def find_by_id(cls, model_id: int, session: Session = None) -> Model: + def find_by_id( + cls, model_id: Union[str, int], session: Session = None + ) -> Optional[Model]: """ Find a model by id, if defined applies `base_filter` """ @@ -57,23 +60,28 @@ def find_by_id(cls, model_id: int, session: Session = None) -> Model: if cls.base_filter: data_model = SQLAInterface(cls.model_cls, session) query = cls.base_filter( # pylint: disable=not-callable - "id", data_model + cls.id_column_name, data_model ).apply(query, None) - return query.filter_by(id=model_id).one_or_none() + id_filter = {cls.id_column_name: model_id} + try: + return query.filter_by(**id_filter).one_or_none() + except StatementError: + # can happen if int is passed instead of a string or similar + return None @classmethod - def find_by_ids(cls, model_ids: List[int]) -> List[Model]: + def find_by_ids(cls, model_ids: Union[List[str], List[int]]) -> List[Model]: """ Find a List of models by a list of ids, if defined applies `base_filter` """ - id_col = getattr(cls.model_cls, "id", None) + id_col = getattr(cls.model_cls, cls.id_column_name, None) if id_col is None: return [] query = db.session.query(cls.model_cls).filter(id_col.in_(model_ids)) if cls.base_filter: data_model = SQLAInterface(cls.model_cls, db.session) query = cls.base_filter( # pylint: 
disable=not-callable - "id", data_model + cls.id_column_name, data_model ).apply(query, None) return query.all() @@ -86,7 +94,7 @@ def find_all(cls) -> List[Model]: if cls.base_filter: data_model = SQLAInterface(cls.model_cls, db.session) query = cls.base_filter( # pylint: disable=not-callable - "id", data_model + cls.id_column_name, data_model ).apply(query, None) return query.all() @@ -99,7 +107,7 @@ def find_one_or_none(cls, **filter_by: Any) -> Optional[Model]: if cls.base_filter: data_model = SQLAInterface(cls.model_cls, db.session) query = cls.base_filter( # pylint: disable=not-callable - "id", data_model + cls.id_column_name, data_model ).apply(query, None) return query.filter_by(**filter_by).one_or_none() diff --git a/superset/dashboards/api.py b/superset/dashboards/api.py index 5e761cdd1f092..d97b5f78e3c04 100644 --- a/superset/dashboards/api.py +++ b/superset/dashboards/api.py @@ -15,14 +15,16 @@ # specific language governing permissions and limitations # under the License. # pylint: disable=too-many-lines +import functools import json import logging from datetime import datetime from io import BytesIO -from typing import Any, Optional +from typing import Any, Callable, Optional from zipfile import is_zipfile, ZipFile from flask import g, make_response, redirect, request, Response, send_file, url_for +from flask_appbuilder import permission_name from flask_appbuilder.api import expose, protect, rison, safe from flask_appbuilder.hooks import before_request from flask_appbuilder.models.sqla.interface import SQLAInterface @@ -65,6 +67,8 @@ DashboardGetResponseSchema, DashboardPostSchema, DashboardPutSchema, + EmbeddedDashboardConfigSchema, + EmbeddedDashboardResponseSchema, get_delete_ids_schema, get_export_ids_schema, get_fav_star_ids_schema, @@ -72,8 +76,10 @@ openapi_spec_methods_override, thumbnail_query_schema, ) +from superset.embedded.dao import EmbeddedDAO from superset.extensions import event_logger from superset.models.dashboard import Dashboard 
+from superset.models.embedded_dashboard import EmbeddedDashboard from superset.tasks.thumbnails import cache_dashboard_thumbnail from superset.utils.cache import etag_cache from superset.utils.screenshots import DashboardScreenshot @@ -91,6 +97,27 @@ logger = logging.getLogger(__name__) +def with_dashboard( + f: Callable[[BaseSupersetModelRestApi, Dashboard], Response] +) -> Callable[[BaseSupersetModelRestApi, str], Response]: + """ + A decorator that looks up the dashboard by id or slug and passes it to the api. + Route must include an parameter. + Responds with 403 or 404 without calling the route, if necessary. + """ + + def wraps(self: BaseSupersetModelRestApi, id_or_slug: str) -> Response: + try: + dash = DashboardDAO.get_by_id_or_slug(id_or_slug) + return f(self, dash) + except DashboardAccessDeniedError: + return self.response_403() + except DashboardNotFoundError: + return self.response_404() + + return functools.update_wrapper(wraps, f) + + class DashboardRestApi(BaseSupersetModelRestApi): datamodel = SQLAInterface(Dashboard) @@ -108,6 +135,9 @@ def ensure_thumbnails_enabled(self) -> Optional[Response]: "favorite_status", "get_charts", "get_datasets", + "get_embedded", + "set_embedded", + "delete_embedded", "thumbnail", } resource_name = "dashboard" @@ -193,6 +223,8 @@ def ensure_thumbnails_enabled(self) -> Optional[Response]: chart_entity_response_schema = ChartEntityResponseSchema() dashboard_get_response_schema = DashboardGetResponseSchema() dashboard_dataset_schema = DashboardDatasetSchema() + embedded_response_schema = EmbeddedDashboardResponseSchema() + embedded_config_schema = EmbeddedDashboardConfigSchema() base_filters = [["id", DashboardAccessFilter, lambda: []]] @@ -215,6 +247,7 @@ def ensure_thumbnails_enabled(self) -> Optional[Response]: DashboardGetResponseSchema, DashboardDatasetSchema, GetFavStarIdsSchema, + EmbeddedDashboardResponseSchema, ) apispec_parameter_schemas = { "get_delete_ids_schema": get_delete_ids_schema, @@ -248,9 +281,11 @@ 
def __repr__(self) -> str: @statsd_metrics @event_logger.log_this_with_context( action=lambda self, *args, **kwargs: f"{self.__class__.__name__}.get", - log_to_statsd=False, # pylint: disable=arguments-renamed + log_to_statsd=False, ) - def get(self, id_or_slug: str) -> Response: + @with_dashboard + # pylint: disable=arguments-renamed, arguments-differ + def get(self, dash: Dashboard) -> Response: """Gets a dashboard --- get: @@ -283,15 +318,8 @@ def get(self, id_or_slug: str) -> Response: 404: $ref: '#/components/responses/404' """ - # pylint: disable=arguments-differ - try: - dash = DashboardDAO.get_by_id_or_slug(id_or_slug) - result = self.dashboard_get_response_schema.dump(dash) - return self.response(200, result=result) - except DashboardAccessDeniedError: - return self.response_403() - except DashboardNotFoundError: - return self.response_404() + result = self.dashboard_get_response_schema.dump(dash) + return self.response(200, result=result) @etag_cache( get_last_modified=lambda _self, id_or_slug: DashboardDAO.get_dashboard_and_datasets_changed_on( # pylint: disable=line-too-long,useless-suppression @@ -1001,3 +1029,168 @@ def import_(self) -> Response: ) command.run() return self.response(200, message="OK") + + @expose("//embedded", methods=["GET"]) + @protect() + @safe + @permission_name("read") + @statsd_metrics + @event_logger.log_this_with_context( + action=lambda self, *args, **kwargs: f"{self.__class__.__name__}.get_embedded", + log_to_statsd=False, + ) + @with_dashboard + def get_embedded(self, dashboard: Dashboard) -> Response: + """Response + Returns the dashboard's embedded configuration + --- + get: + description: >- + Returns the dashboard's embedded configuration + parameters: + - in: path + schema: + type: string + name: id_or_slug + description: The dashboard id or slug + responses: + 200: + description: Result contains the embedded dashboard config + content: + application/json: + schema: + type: object + properties: + result: + $ref: 
'#/components/schemas/EmbeddedDashboardResponseSchema' + 401: + $ref: '#/components/responses/401' + 500: + $ref: '#/components/responses/500' + """ + if not dashboard.embedded: + return self.response(404) + embedded: EmbeddedDashboard = dashboard.embedded[0] + result = self.embedded_response_schema.dump(embedded) + return self.response(200, result=result) + + @expose("//embedded", methods=["POST", "PUT"]) + @protect() + @safe + @statsd_metrics + @event_logger.log_this_with_context( + action=lambda self, *args, **kwargs: f"{self.__class__.__name__}.set_embedded", + log_to_statsd=False, + ) + @with_dashboard + def set_embedded(self, dashboard: Dashboard) -> Response: + """Response + Sets a dashboard's embedded configuration. + --- + post: + description: >- + Sets a dashboard's embedded configuration. + parameters: + - in: path + schema: + type: string + name: id_or_slug + description: The dashboard id or slug + requestBody: + description: The embedded configuration to set + required: true + content: + application/json: + schema: EmbeddedDashboardConfigSchema + responses: + 200: + description: Successfully set the configuration + content: + application/json: + schema: + type: object + properties: + result: + $ref: '#/components/schemas/EmbeddedDashboardResponseSchema' + 401: + $ref: '#/components/responses/401' + 500: + $ref: '#/components/responses/500' + put: + description: >- + Sets a dashboard's embedded configuration. 
+ parameters: + - in: path + schema: + type: string + name: id_or_slug + description: The dashboard id or slug + requestBody: + description: The embedded configuration to set + required: true + content: + application/json: + schema: EmbeddedDashboardConfigSchema + responses: + 200: + description: Successfully set the configuration + content: + application/json: + schema: + type: object + properties: + result: + $ref: '#/components/schemas/EmbeddedDashboardResponseSchema' + 401: + $ref: '#/components/responses/401' + 500: + $ref: '#/components/responses/500' + """ + try: + body = self.embedded_config_schema.load(request.json) + embedded = EmbeddedDAO.upsert(dashboard, body["allowed_domains"]) + result = self.embedded_response_schema.dump(embedded) + return self.response(200, result=result) + except ValidationError as error: + return self.response_400(message=error.messages) + + @expose("//embedded", methods=["DELETE"]) + @protect() + @safe + @permission_name("set_embedded") + @statsd_metrics + @event_logger.log_this_with_context( + action=lambda self, *args, **kwargs: f"{self.__class__.__name__}.delete_embedded", + log_to_statsd=False, + ) + @with_dashboard + def delete_embedded(self, dashboard: Dashboard) -> Response: + """Response + Removes a dashboard's embedded configuration. + --- + delete: + description: >- + Removes a dashboard's embedded configuration. 
+ parameters: + - in: path + schema: + type: string + name: id_or_slug + description: The dashboard id or slug + responses: + 200: + description: Successfully removed the configuration + content: + application/json: + schema: + type: object + properties: + message: + type: string + 401: + $ref: '#/components/responses/401' + 500: + $ref: '#/components/responses/500' + """ + dashboard.embedded = [] + return self.response(200, message="OK") diff --git a/superset/dashboards/commands/export.py b/superset/dashboards/commands/export.py index 87408bab37706..c7aa8b6e5c65b 100644 --- a/superset/dashboards/commands/export.py +++ b/superset/dashboards/commands/export.py @@ -140,9 +140,10 @@ def _export( dataset_id = target.pop("datasetId", None) if dataset_id is not None: dataset = DatasetDAO.find_by_id(dataset_id) - target["datasetUuid"] = str(dataset.uuid) - if export_related: - yield from ExportDatasetsCommand([dataset_id]).run() + if dataset: + target["datasetUuid"] = str(dataset.uuid) + if export_related: + yield from ExportDatasetsCommand([dataset_id]).run() # the mapping between dashboard -> charts is inferred from the position # attribute, so if it's not present we need to add a default config diff --git a/superset/dashboards/filters.py b/superset/dashboards/filters.py index 5f79392e71ecd..52a945ca41a62 100644 --- a/superset/dashboards/filters.py +++ b/superset/dashboards/filters.py @@ -14,7 +14,8 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. 
-from typing import Any, Optional +import uuid +from typing import Any, Optional, Union from flask import g from flask_appbuilder.security.sqla.models import Role @@ -25,6 +26,7 @@ from superset import db, is_feature_enabled, security_manager from superset.models.core import FavStar from superset.models.dashboard import Dashboard +from superset.models.embedded_dashboard import EmbeddedDashboard from superset.models.slice import Slice from superset.security.guest_token import GuestTokenResourceType, GuestUser from superset.views.base import BaseFilter, is_user_admin @@ -59,6 +61,14 @@ class DashboardFavoriteFilter( # pylint: disable=too-few-public-methods model = Dashboard +def is_uuid(value: Union[str, int]) -> bool: + try: + uuid.UUID(str(value)) + return True + except ValueError: + return False + + class DashboardAccessFilter(BaseFilter): # pylint: disable=too-few-public-methods """ List dashboards with the following criteria: @@ -133,14 +143,24 @@ def apply(self, query: Query, value: Any) -> Query: if is_feature_enabled("EMBEDDED_SUPERSET") and security_manager.is_guest_user( g.user ): + guest_user: GuestUser = g.user embedded_dashboard_ids = [ r["id"] for r in guest_user.resources if r["type"] == GuestTokenResourceType.DASHBOARD.value ] - if len(embedded_dashboard_ids) != 0: - feature_flagged_filters.append(Dashboard.id.in_(embedded_dashboard_ids)) + + # TODO (embedded): only use uuid filter once uuids are rolled out + condition = ( + Dashboard.embedded.any( + EmbeddedDashboard.uuid.in_(embedded_dashboard_ids) + ) + if any(is_uuid(id_) for id_ in embedded_dashboard_ids) + else Dashboard.id.in_(embedded_dashboard_ids) + ) + + feature_flagged_filters.append(condition) query = query.filter( or_( diff --git a/superset/dashboards/permalink/commands/create.py b/superset/dashboards/permalink/commands/create.py index 27ddf0534da88..4ffd41104ea08 100644 --- a/superset/dashboards/permalink/commands/create.py +++ b/superset/dashboards/permalink/commands/create.py @@ -53,6 
+53,8 @@ def run(self) -> str: resource=self.resource, value=value, ).run() + if key.id is None: + raise DashboardPermalinkCreateFailedError("Unexpected missing key id") return encode_permalink_key(key=key.id, salt=self.salt) except SQLAlchemyError as ex: logger.exception("Error running create command") diff --git a/superset/dashboards/schemas.py b/superset/dashboards/schemas.py index 9b668df7212d0..d91879f0d88b3 100644 --- a/superset/dashboards/schemas.py +++ b/superset/dashboards/schemas.py @@ -309,3 +309,15 @@ class ImportV1DashboardSchema(Schema): version = fields.String(required=True) is_managed_externally = fields.Boolean(allow_none=True, default=False) external_url = fields.String(allow_none=True) + + +class EmbeddedDashboardConfigSchema(Schema): + allowed_domains = fields.List(fields.String(), required=True) + + +class EmbeddedDashboardResponseSchema(Schema): + uuid = fields.String() + allowed_domains = fields.List(fields.String()) + dashboard_id = fields.String() + changed_on = fields.DateTime() + changed_by = fields.Nested(UserSchema) diff --git a/superset/databases/dao.py b/superset/databases/dao.py index d8813dc8eba3e..5e47772cfc635 100644 --- a/superset/databases/dao.py +++ b/superset/databases/dao.py @@ -68,7 +68,8 @@ def build_db_for_connection_test( @classmethod def get_related_objects(cls, database_id: int) -> Dict[str, Any]: - datasets = cls.find_by_id(database_id).tables + database: Any = cls.find_by_id(database_id) + datasets = database.tables dataset_ids = [dataset.id for dataset in datasets] charts = ( diff --git a/superset/embedded/__init__.py b/superset/embedded/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/superset/embedded/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/superset/embedded/dao.py b/superset/embedded/dao.py new file mode 100644 index 0000000000000..957a7242a77d3 --- /dev/null +++ b/superset/embedded/dao.py @@ -0,0 +1,53 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+import logging +from typing import Any, Dict, List + +from superset.dao.base import BaseDAO +from superset.extensions import db +from superset.models.dashboard import Dashboard +from superset.models.embedded_dashboard import EmbeddedDashboard + +logger = logging.getLogger(__name__) + + +class EmbeddedDAO(BaseDAO): + model_cls = EmbeddedDashboard + # There isn't really a regular scenario where we would rather get Embedded by id + id_column_name = "uuid" + + @staticmethod + def upsert(dashboard: Dashboard, allowed_domains: List[str]) -> EmbeddedDashboard: + """ + Sets up a dashboard to be embeddable. + Upsert is used to preserve the embedded_dashboard uuid across updates. + """ + embedded: EmbeddedDashboard = ( + dashboard.embedded[0] if dashboard.embedded else EmbeddedDashboard() + ) + embedded.allow_domain_list = ",".join(allowed_domains) + dashboard.embedded = [embedded] + db.session.commit() + return embedded + + @classmethod + def create(cls, properties: Dict[str, Any], commit: bool = True) -> Any: + """ + Use EmbeddedDAO.upsert() instead. + At least, until we are ok with more than one embedded instance per dashboard. + """ + raise NotImplementedError("Use EmbeddedDAO.upsert() instead.") diff --git a/superset/embedded/view.py b/superset/embedded/view.py new file mode 100644 index 0000000000000..487850b728851 --- /dev/null +++ b/superset/embedded/view.py @@ -0,0 +1,80 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +import json +from typing import Callable + +from flask import abort +from flask_appbuilder import expose +from flask_login import AnonymousUserMixin, LoginManager + +from superset import event_logger, is_feature_enabled, security_manager +from superset.embedded.dao import EmbeddedDAO +from superset.superset_typing import FlaskResponse +from superset.utils import core as utils +from superset.views.base import BaseSupersetView, common_bootstrap_payload + + +class EmbeddedView(BaseSupersetView): + """The views for embedded resources to be rendered in an iframe""" + + route_base = "/embedded" + + @expose("/") + @event_logger.log_this_with_extra_payload + def embedded( + self, + uuid: str, + add_extra_log_payload: Callable[..., None] = lambda **kwargs: None, + ) -> FlaskResponse: + """ + Server side rendering for the embedded dashboard page + :param uuid: identifier for embedded dashboard + :param add_extra_log_payload: added by `log_this_with_manual_updates`, set a + default value to appease pylint + """ + if not is_feature_enabled("EMBEDDED_SUPERSET"): + abort(404) + + embedded = EmbeddedDAO.find_by_id(uuid) + if not embedded: + abort(404) + + # Log in as an anonymous user, just for this view. + # This view needs to be visible to all users, + # and building the page fails if g.user and/or ctx.user aren't present. 
+ login_manager: LoginManager = security_manager.lm + login_manager.reload_user(AnonymousUserMixin()) + + add_extra_log_payload( + embedded_dashboard_id=uuid, + dashboard_version="v2", + ) + + bootstrap_data = { + "common": common_bootstrap_payload(), + "embedded": { + "dashboard_id": embedded.dashboard_id, + }, + } + + return self.render_template( + "superset/spa.html", + entry="embedded", + bootstrap_data=json.dumps( + bootstrap_data, default=utils.pessimistic_json_iso_dttm_ser + ), + ) diff --git a/superset/explore/permalink/commands/create.py b/superset/explore/permalink/commands/create.py index 55fb0820cda0b..c09ca3b372121 100644 --- a/superset/explore/permalink/commands/create.py +++ b/superset/explore/permalink/commands/create.py @@ -53,6 +53,8 @@ def run(self) -> str: value=value, ) key = command.run() + if key.id is None: + raise ExplorePermalinkCreateFailedError("Unexpected missing key id") return encode_permalink_key(key=key.id, salt=self.salt) except SQLAlchemyError as ex: logger.exception("Error running create command") diff --git a/superset/importexport/__init__.py b/superset/importexport/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/superset/importexport/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. 
See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/superset/initialization/__init__.py b/superset/initialization/__init__.py index 2b970b718fadf..5b11f0930a782 100644 --- a/superset/initialization/__init__.py +++ b/superset/initialization/__init__.py @@ -141,6 +141,7 @@ def init_views(self) -> None: from superset.datasets.api import DatasetRestApi from superset.datasets.columns.api import DatasetColumnsRestApi from superset.datasets.metrics.api import DatasetMetricRestApi + from superset.embedded.view import EmbeddedView from superset.explore.form_data.api import ExploreFormDataRestApi from superset.explore.permalink.api import ExplorePermalinkRestApi from superset.importexport.api import ImportExportRestApi @@ -292,6 +293,7 @@ def init_views(self) -> None: appbuilder.add_view_no_menu(Dashboard) appbuilder.add_view_no_menu(DashboardModelViewAsync) appbuilder.add_view_no_menu(Datasource) + appbuilder.add_view_no_menu(EmbeddedView) appbuilder.add_view_no_menu(KV) appbuilder.add_view_no_menu(R) appbuilder.add_view_no_menu(SavedQueryView) diff --git a/superset/key_value/commands/__init__.py b/superset/key_value/commands/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/superset/key_value/commands/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
diff --git a/superset/key_value/commands/create.py b/superset/key_value/commands/create.py
index 613fabcdeb1f7..5125ce7b01e28 100644
--- a/superset/key_value/commands/create.py
+++ b/superset/key_value/commands/create.py
@@ -39,6 +39,7 @@ class CreateKeyValueCommand(BaseCommand):
     key: Optional[Union[int, UUID]]
     expires_on: Optional[datetime]
 
+    # pylint: disable=too-many-arguments
     def __init__(
         self,
         resource: KeyValueResource,
diff --git a/superset/key_value/commands/update.py b/superset/key_value/commands/update.py
index 7333b48c5cc34..48fd8daa8a458 100644
--- a/superset/key_value/commands/update.py
+++ b/superset/key_value/commands/update.py
@@ -41,6 +41,7 @@ class UpdateKeyValueCommand(BaseCommand):
     key: Union[int, UUID]
     expires_on: Optional[datetime]
 
+    # pylint: disable=too-many-arguments
    def __init__(
         self,
         resource: KeyValueResource,
diff --git a/superset/key_value/commands/upsert.py b/superset/key_value/commands/upsert.py
index aa495f7cc77c1..8fd0bd240f2be 100644
--- a/superset/key_value/commands/upsert.py
+++ b/superset/key_value/commands/upsert.py
@@ -42,6 +42,7 @@ class UpsertKeyValueCommand(BaseCommand):
     key: Union[int, UUID]
     expires_on: Optional[datetime]
 
+    # pylint: disable=too-many-arguments
     def __init__(
         self,
         resource: KeyValueResource,
@@ -96,11 +97,10 @@ def upsert(self) -> Optional[Key]:
             db.session.merge(entry)
             db.session.commit()
             return Key(entry.id, entry.uuid)
-        else:
-            return CreateKeyValueCommand(
-                resource=self.resource,
-                value=self.value,
-                actor=self.actor,
-                key=self.key,
-                expires_on=self.expires_on,
-            ).run()
+        return 
CreateKeyValueCommand( + resource=self.resource, + value=self.value, + actor=self.actor, + key=self.key, + expires_on=self.expires_on, + ).run() diff --git a/superset/migrations/shared/utils.py b/superset/migrations/shared/utils.py index 03317181178d7..bff25e05d137f 100644 --- a/superset/migrations/shared/utils.py +++ b/superset/migrations/shared/utils.py @@ -14,10 +14,39 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. +import logging +from typing import Any, Iterator, Optional, Set + from alembic import op from sqlalchemy import engine_from_config from sqlalchemy.engine import reflection from sqlalchemy.exc import NoSuchTableError +from sqloxide import parse_sql + +from superset.sql_parse import ParsedQuery, Table + +logger = logging.getLogger("alembic") + + +# mapping between sqloxide and SQLAlchemy dialects +sqloxide_dialects = { + "ansi": {"trino", "trinonative", "presto"}, + "hive": {"hive", "databricks"}, + "ms": {"mssql"}, + "mysql": {"mysql"}, + "postgres": { + "cockroachdb", + "hana", + "netezza", + "postgres", + "postgresql", + "redshift", + "vertica", + }, + "snowflake": {"snowflake"}, + "sqlite": {"sqlite", "gsheets", "shillelagh"}, + "clickhouse": {"clickhouse"}, +} def table_has_column(table: str, column: str) -> bool: @@ -38,3 +67,40 @@ def table_has_column(table: str, column: str) -> bool: return any(col["name"] == column for col in insp.get_columns(table)) except NoSuchTableError: return False + + +def find_nodes_by_key(element: Any, target: str) -> Iterator[Any]: + """ + Find all nodes in a SQL tree matching a given key. 
+ """ + if isinstance(element, list): + for child in element: + yield from find_nodes_by_key(child, target) + elif isinstance(element, dict): + for key, value in element.items(): + if key == target: + yield value + else: + yield from find_nodes_by_key(value, target) + + +def extract_table_references(sql_text: str, sqla_dialect: str) -> Set[Table]: + """ + Return all the dependencies from a SQL sql_text. + """ + dialect = "generic" + for dialect, sqla_dialects in sqloxide_dialects.items(): + if sqla_dialect in sqla_dialects: + break + try: + tree = parse_sql(sql_text, dialect=dialect) + except Exception: # pylint: disable=broad-except + logger.warning("Unable to parse query with sqloxide: %s", sql_text) + # fallback to sqlparse + parsed = ParsedQuery(sql_text) + return parsed.tables + + return { + Table(*[part["value"] for part in table["name"][::-1]]) + for table in find_nodes_by_key(tree, "Table") + } diff --git a/superset/migrations/versions/b8d3a24d9131_new_dataset_models.py b/superset/migrations/versions/b8d3a24d9131_new_dataset_models.py index 35419e0066cd4..75f5293034ead 100644 --- a/superset/migrations/versions/b8d3a24d9131_new_dataset_models.py +++ b/superset/migrations/versions/b8d3a24d9131_new_dataset_models.py @@ -25,7 +25,7 @@ """ import json -from typing import List +from typing import Callable, List, Optional, Set from uuid import uuid4 import sqlalchemy as sa @@ -40,7 +40,9 @@ from superset import app, db from superset.connectors.sqla.models import ADDITIVE_METRIC_TYPES from superset.extensions import encrypted_field_factory -from superset.sql_parse import ParsedQuery +from superset.migrations.shared.utils import extract_table_references +from superset.models.core import Database as OriginalDatabase +from superset.sql_parse import Table # revision identifiers, used by Alembic. 
revision = "b8d3a24d9131" @@ -228,6 +230,85 @@ class NewDataset(Base): external_url = sa.Column(sa.Text, nullable=True) +TEMPORAL_TYPES = {"DATETIME", "DATE", "TIME", "TIMEDELTA"} + + +def load_or_create_tables( + session: Session, + database_id: int, + default_schema: Optional[str], + tables: Set[Table], + conditional_quote: Callable[[str], str], +) -> List[NewTable]: + """ + Load or create new table model instances. + """ + if not tables: + return [] + + # set the default schema in tables that don't have it + if default_schema: + tables = list(tables) + for i, table in enumerate(tables): + if table.schema is None: + tables[i] = Table(table.table, default_schema, table.catalog) + + # load existing tables + predicate = or_( + *[ + and_( + NewTable.database_id == database_id, + NewTable.schema == table.schema, + NewTable.name == table.table, + ) + for table in tables + ] + ) + new_tables = session.query(NewTable).filter(predicate).all() + + # use original database model to get the engine + engine = ( + session.query(OriginalDatabase) + .filter_by(id=database_id) + .one() + .get_sqla_engine(default_schema) + ) + inspector = inspect(engine) + + # add missing tables + existing = {(table.schema, table.name) for table in new_tables} + for table in tables: + if (table.schema, table.table) not in existing: + column_metadata = inspector.get_columns(table.table, schema=table.schema) + columns = [ + NewColumn( + name=column["name"], + type=str(column["type"]), + expression=conditional_quote(column["name"]), + is_temporal=column["type"].python_type.__name__.upper() + in TEMPORAL_TYPES, + is_aggregation=False, + is_physical=True, + is_spatial=False, + is_partition=False, + is_increase_desired=True, + ) + for column in column_metadata + ] + new_tables.append( + NewTable( + name=table.table, + schema=table.schema, + catalog=None, + database_id=database_id, + columns=columns, + ) + ) + existing.add((table.schema, table.table)) + + return new_tables + + def after_insert(target: 
SqlaTable) -> None: # pylint: disable=too-many-locals """ Copy old datasets to the new models. @@ -253,7 +334,10 @@ def after_insert(target: SqlaTable) -> None: # pylint: disable=too-many-locals if column.is_active is False: continue - extra_json = json.loads(column.extra or "{}") + try: + extra_json = json.loads(column.extra or "{}") + except json.decoder.JSONDecodeError: + extra_json = {} for attr in {"groupby", "filterable", "verbose_name", "python_date_format"}: value = getattr(column, attr) if value: @@ -279,7 +363,10 @@ def after_insert(target: SqlaTable) -> None: # pylint: disable=too-many-locals # create metrics for metric in target.metrics: - extra_json = json.loads(metric.extra or "{}") + try: + extra_json = json.loads(metric.extra or "{}") + except json.decoder.JSONDecodeError: + extra_json = {} for attr in {"verbose_name", "metric_type", "d3format"}: value = getattr(metric, attr) if value: @@ -309,8 +396,7 @@ def after_insert(target: SqlaTable) -> None: # pylint: disable=too-many-locals ) # physical dataset - tables = [] - if target.sql is None: + if not target.sql: physical_columns = [column for column in columns if column.is_physical] # create table @@ -323,7 +409,7 @@ def after_insert(target: SqlaTable) -> None: # pylint: disable=too-many-locals is_managed_externally=target.is_managed_externally, external_url=target.external_url, ) - tables.append(table) + tables = [table] # virtual dataset else: @@ -332,20 +418,14 @@ def after_insert(target: SqlaTable) -> None: # pylint: disable=too-many-locals column.is_physical = False # find referenced tables - parsed = ParsedQuery(target.sql) - referenced_tables = parsed.tables - - # predicate for finding the referenced tables - predicate = or_( - *[ - and_( - NewTable.schema == (table.schema or target.schema), - NewTable.name == table.table, - ) - for table in referenced_tables - ] + referenced_tables = extract_table_references(target.sql, dialect_class.name) + tables = load_or_create_tables( + session, + 
target.database_id, + target.schema, + referenced_tables, + conditional_quote, ) - tables = session.query(NewTable).filter(predicate).all() # create the new dataset dataset = NewDataset( @@ -354,7 +434,7 @@ def after_insert(target: SqlaTable) -> None: # pylint: disable=too-many-locals expression=target.sql or conditional_quote(target.table_name), tables=tables, columns=columns, - is_physical=target.sql is None, + is_physical=not target.sql, is_managed_externally=target.is_managed_externally, external_url=target.external_url, ) diff --git a/superset/models/dashboard.py b/superset/models/dashboard.py index 7a8710af0d4af..50619829c289e 100644 --- a/superset/models/dashboard.py +++ b/superset/models/dashboard.py @@ -152,6 +152,11 @@ class Dashboard(Model, AuditMixinNullable, ImportExportMixin): is_managed_externally = Column(Boolean, nullable=False, default=False) external_url = Column(Text, nullable=True) roles = relationship(security_manager.role_model, secondary=DashboardRoles) + embedded = relationship( + "EmbeddedDashboard", + back_populates="dashboard", + cascade="all, delete-orphan", + ) _filter_sets = relationship( "FilterSet", back_populates="dashboard", cascade="all, delete" ) diff --git a/superset/models/embedded_dashboard.py b/superset/models/embedded_dashboard.py new file mode 100644 index 0000000000000..7718bc886f97a --- /dev/null +++ b/superset/models/embedded_dashboard.py @@ -0,0 +1,57 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +import uuid +from typing import List + +from flask_appbuilder import Model +from sqlalchemy import Column, ForeignKey, Integer, Text +from sqlalchemy.orm import relationship +from sqlalchemy_utils import UUIDType + +from superset.models.helpers import AuditMixinNullable + + +class EmbeddedDashboard(Model, AuditMixinNullable): + """ + A configuration of embedding for a dashboard. + Currently, the only embeddable resource is the Dashboard. + If we add new embeddable resource types, this model should probably be renamed. + + References the dashboard, and contains a config for embedding that dashboard. + + This data model allows multiple configurations for a given dashboard, + but at this time the API only allows setting one. + """ + + __tablename__ = "embedded_dashboards" + + uuid = Column(UUIDType(binary=True), default=uuid.uuid4, primary_key=True) + allow_domain_list = Column(Text) # reference the `allowed_domains` property instead + dashboard_id = Column(Integer, ForeignKey("dashboards.id"), nullable=False) + dashboard = relationship( + "Dashboard", + back_populates="embedded", + foreign_keys=[dashboard_id], + ) + + @property + def allowed_domains(self) -> List[str]: + """ + A list of domains which are allowed to embed the dashboard. + An empty list means any domain can embed. 
+ """ + return self.allow_domain_list.split(",") if self.allow_domain_list else [] diff --git a/superset/security/manager.py b/superset/security/manager.py index 57f3f2f9586e3..ab21dbd54eed4 100644 --- a/superset/security/manager.py +++ b/superset/security/manager.py @@ -189,6 +189,7 @@ class SupersetSecurityManager( # pylint: disable=too-many-public-methods "can_update_role", "all_query_access", "can_grant_guest_token", + "can_set_embedded", } READ_ONLY_PERMISSION = { @@ -1268,10 +1269,8 @@ def has_rbac_access() -> bool: for dashboard_role in dashboard.roles ) - if self.is_guest_user(): - can_access = self.has_guest_access( - GuestTokenResourceType.DASHBOARD, dashboard.id - ) + if self.is_guest_user() and dashboard.embedded: + can_access = self.has_guest_access(dashboard) else: can_access = ( is_user_admin() @@ -1410,15 +1409,26 @@ def get_current_guest_user_if_guest(self) -> Optional[GuestUser]: return g.user return None - def has_guest_access( - self, resource_type: GuestTokenResourceType, resource_id: Union[str, int] - ) -> bool: + def has_guest_access(self, dashboard: "Dashboard") -> bool: user = self.get_current_guest_user_if_guest() if not user: return False - strid = str(resource_id) - for resource in user.resources: - if resource["type"] == resource_type.value and str(resource["id"]) == strid: + dashboards = [ + r + for r in user.resources + if r["type"] == GuestTokenResourceType.DASHBOARD.value + ] + + # TODO (embedded): remove this check once uuids are rolled out + for resource in dashboards: + if str(resource["id"]) == str(dashboard.id): + return True + + if not dashboard.embedded: + return False + + for resource in dashboards: + if str(resource["id"]) == str(dashboard.embedded[0].uuid): return True return False diff --git a/superset/sqllab/command.py b/superset/sqllab/command.py index bdc570b603433..ff50e18eda9e5 100644 --- a/superset/sqllab/command.py +++ b/superset/sqllab/command.py @@ -138,7 +138,7 @@ def _run_sql_json_exec_from_scratch(self) -> 
SqlJsonExecutionStatus: raise ex def _get_the_query_db(self) -> Database: - mydb = self._database_dao.find_by_id(self._execution_context.database_id) + mydb: Any = self._database_dao.find_by_id(self._execution_context.database_id) self._validate_query_db(mydb) return mydb diff --git a/superset/views/dashboard/views.py b/superset/views/dashboard/views.py index 49bdf76edcb0b..256bb4c95c025 100644 --- a/superset/views/dashboard/views.py +++ b/superset/views/dashboard/views.py @@ -161,6 +161,7 @@ def embedded( bootstrap_data = { "common": common_bootstrap_payload(), + "embedded": {"dashboard_id": dashboard_id_or_slug}, } return self.render_template( diff --git a/tests/integration_tests/dashboards/api_tests.py b/tests/integration_tests/dashboards/api_tests.py index 7fc8c2e2518fc..a179fa7c7e453 100644 --- a/tests/integration_tests/dashboards/api_tests.py +++ b/tests/integration_tests/dashboards/api_tests.py @@ -388,7 +388,14 @@ def test_info_security_database(self): rv = self.get_assert_metric(uri, "info") data = json.loads(rv.data.decode("utf-8")) assert rv.status_code == 200 - assert set(data["permissions"]) == {"can_read", "can_write", "can_export"} + assert set(data["permissions"]) == { + "can_read", + "can_write", + "can_export", + "can_get_embedded", + "can_delete_embedded", + "can_set_embedded", + } @pytest.mark.usefixtures("load_world_bank_dashboard_with_slices") def test_get_dashboard_not_found(self): @@ -1710,3 +1717,58 @@ def test_get_filter_related_roles(self): response_roles = [result["text"] for result in response["result"]] assert "Alpha" in response_roles + + @pytest.mark.usefixtures("load_world_bank_dashboard_with_slices") + def test_embedded_dashboards(self): + self.login(username="admin") + uri = "api/v1/dashboard/world_health/embedded" + + # initial get should return 404 + resp = self.get_assert_metric(uri, "get_embedded") + self.assertEqual(resp.status_code, 404) + + # post succeeds and returns value + allowed_domains = ["test.example", 
"embedded.example"] + resp = self.post_assert_metric( + uri, + {"allowed_domains": allowed_domains}, + "set_embedded", + ) + self.assertEqual(resp.status_code, 200) + result = json.loads(resp.data.decode("utf-8"))["result"] + self.assertIsNotNone(result["uuid"]) + self.assertNotEqual(result["uuid"], "") + self.assertEqual(result["allowed_domains"], allowed_domains) + + # get returns value + resp = self.get_assert_metric(uri, "get_embedded") + self.assertEqual(resp.status_code, 200) + result = json.loads(resp.data.decode("utf-8"))["result"] + self.assertIsNotNone(result["uuid"]) + self.assertNotEqual(result["uuid"], "") + self.assertEqual(result["allowed_domains"], allowed_domains) + + # save uuid for later + original_uuid = result["uuid"] + + # put succeeds and returns value + resp = self.post_assert_metric(uri, {"allowed_domains": []}, "set_embedded") + self.assertEqual(resp.status_code, 200) + self.assertIsNotNone(result["uuid"]) + self.assertNotEqual(result["uuid"], "") + self.assertEqual(result["allowed_domains"], allowed_domains) + + # get returns changed value + resp = self.get_assert_metric(uri, "get_embedded") + self.assertEqual(resp.status_code, 200) + result = json.loads(resp.data.decode("utf-8"))["result"] + self.assertEqual(result["uuid"], original_uuid) + self.assertEqual(result["allowed_domains"], []) + + # delete succeeds + resp = self.delete_assert_metric(uri, "delete_embedded") + self.assertEqual(resp.status_code, 200) + + # get returns 404 + resp = self.get_assert_metric(uri, "get_embedded") + self.assertEqual(resp.status_code, 404) diff --git a/tests/integration_tests/embedded/__init__.py b/tests/integration_tests/embedded/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/integration_tests/embedded/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. 
See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/integration_tests/embedded/dao_tests.py b/tests/integration_tests/embedded/dao_tests.py new file mode 100644 index 0000000000000..8160144a25cbc --- /dev/null +++ b/tests/integration_tests/embedded/dao_tests.py @@ -0,0 +1,51 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+# isort:skip_file +import pytest + +import tests.integration_tests.test_app # pylint: disable=unused-import +from superset import db +from superset.embedded.dao import EmbeddedDAO +from superset.models.dashboard import Dashboard +from tests.integration_tests.base_tests import SupersetTestCase +from tests.integration_tests.fixtures.world_bank_dashboard import ( + load_world_bank_dashboard_with_slices, + load_world_bank_data, +) + + +class TestEmbeddedDAO(SupersetTestCase): + @pytest.mark.usefixtures("load_world_bank_dashboard_with_slices") + def test_upsert(self): + dash = db.session.query(Dashboard).filter_by(slug="world_health").first() + assert not dash.embedded + EmbeddedDAO.upsert(dash, ["test.example.com"]) + assert dash.embedded + self.assertEqual(dash.embedded[0].allowed_domains, ["test.example.com"]) + original_uuid = dash.embedded[0].uuid + self.assertIsNotNone(original_uuid) + EmbeddedDAO.upsert(dash, []) + self.assertEqual(dash.embedded[0].allowed_domains, []) + self.assertEqual(dash.embedded[0].uuid, original_uuid) + + @pytest.mark.usefixtures("load_world_bank_dashboard_with_slices") + def test_get_by_uuid(self): + dash = db.session.query(Dashboard).filter_by(slug="world_health").first() + uuid = str(EmbeddedDAO.upsert(dash, ["test.example.com"]).uuid) + db.session.expire_all() + embedded = EmbeddedDAO.find_by_id(uuid) + self.assertIsNotNone(embedded) diff --git a/tests/integration_tests/key_value/commands/update_test.py b/tests/integration_tests/key_value/commands/update_test.py index 62a8126ba2ac6..3b24ecdf0a300 100644 --- a/tests/integration_tests/key_value/commands/update_test.py +++ b/tests/integration_tests/key_value/commands/update_test.py @@ -53,6 +53,7 @@ def test_update_id_entry( key=ID_KEY, value=NEW_VALUE, ).run() + assert key is not None assert key.id == ID_KEY entry = db.session.query(KeyValueEntry).filter_by(id=ID_KEY).autoflush(False).one() assert pickle.loads(entry.value) == NEW_VALUE @@ -73,6 +74,7 @@ def test_update_uuid_entry( 
key=UUID_KEY, value=NEW_VALUE, ).run() + assert key is not None assert key.uuid == UUID_KEY entry = ( db.session.query(KeyValueEntry).filter_by(uuid=UUID_KEY).autoflush(False).one() diff --git a/tests/integration_tests/key_value/commands/upsert_test.py b/tests/integration_tests/key_value/commands/upsert_test.py index adb652e66a195..1970a1fc2c293 100644 --- a/tests/integration_tests/key_value/commands/upsert_test.py +++ b/tests/integration_tests/key_value/commands/upsert_test.py @@ -53,6 +53,7 @@ def test_upsert_id_entry( key=ID_KEY, value=NEW_VALUE, ).run() + assert key is not None assert key.id == ID_KEY entry = ( db.session.query(KeyValueEntry).filter_by(id=int(ID_KEY)).autoflush(False).one() @@ -75,6 +76,7 @@ def test_upsert_uuid_entry( key=UUID_KEY, value=NEW_VALUE, ).run() + assert key is not None assert key.uuid == UUID_KEY entry = ( db.session.query(KeyValueEntry).filter_by(uuid=UUID_KEY).autoflush(False).one() @@ -93,6 +95,7 @@ def test_upsert_missing_entry(app_context: AppContext, admin: User) -> None: key=456, value=NEW_VALUE, ).run() + assert key is not None assert key.id == 456 db.session.query(KeyValueEntry).filter_by(id=456).delete() db.session.commit() diff --git a/tests/integration_tests/security/guest_token_security_tests.py b/tests/integration_tests/security/guest_token_security_tests.py index e4d55d9747b98..78bd8bde86f51 100644 --- a/tests/integration_tests/security/guest_token_security_tests.py +++ b/tests/integration_tests/security/guest_token_security_tests.py @@ -22,6 +22,7 @@ from superset import db, security_manager from superset.dashboards.commands.exceptions import DashboardAccessDeniedError +from superset.embedded.dao import EmbeddedDAO from superset.exceptions import SupersetSecurityException from superset.models.dashboard import Dashboard from superset.security.guest_token import GuestTokenResourceType @@ -38,14 +39,9 @@ EMBEDDED_SUPERSET=True, ) class TestGuestUserSecurity(SupersetTestCase): - # This test doesn't use a dashboard 
fixture, the next test does. - # That way tests are faster. - - resource_id = 42 - def authorized_guest(self): return security_manager.get_guest_user_from_token( - {"user": {}, "resources": [{"type": "dashboard", "id": self.resource_id}]} + {"user": {}, "resources": [{"type": "dashboard", "id": "some-uuid"}]} ) def test_is_guest_user__regular_user(self): @@ -83,88 +79,86 @@ def test_get_guest_user__guest_user(self): guest_user = security_manager.get_current_guest_user_if_guest() self.assertEqual(guest_user, g.user) + def test_get_guest_user_roles_explicit(self): + guest = self.authorized_guest() + roles = security_manager.get_user_roles(guest) + self.assertEqual(guest.roles, roles) + + def test_get_guest_user_roles_implicit(self): + guest = self.authorized_guest() + g.user = guest + + roles = security_manager.get_user_roles() + self.assertEqual(guest.roles, roles) + + +@mock.patch.dict( + "superset.extensions.feature_flag_manager._feature_flags", + EMBEDDED_SUPERSET=True, +) +@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") +class TestGuestUserDashboardAccess(SupersetTestCase): + def setUp(self) -> None: + self.dash = db.session.query(Dashboard).filter_by(slug="births").first() + self.embedded = EmbeddedDAO.upsert(self.dash, []) + self.authorized_guest = security_manager.get_guest_user_from_token( + { + "user": {}, + "resources": [{"type": "dashboard", "id": str(self.embedded.uuid)}], + } + ) + self.unauthorized_guest = security_manager.get_guest_user_from_token( + { + "user": {}, + "resources": [ + {"type": "dashboard", "id": "06383667-3e02-4e5e-843f-44e9c5896b6c"} + ], + } + ) + def test_has_guest_access__regular_user(self): g.user = security_manager.find_user("admin") - has_guest_access = security_manager.has_guest_access( - GuestTokenResourceType.DASHBOARD, self.resource_id - ) + has_guest_access = security_manager.has_guest_access(self.dash) self.assertFalse(has_guest_access) def test_has_guest_access__anonymous_user(self): g.user = 
security_manager.get_anonymous_user() - has_guest_access = security_manager.has_guest_access( - GuestTokenResourceType.DASHBOARD, self.resource_id - ) + has_guest_access = security_manager.has_guest_access(self.dash) self.assertFalse(has_guest_access) def test_has_guest_access__authorized_guest_user(self): - g.user = self.authorized_guest() - has_guest_access = security_manager.has_guest_access( - GuestTokenResourceType.DASHBOARD, self.resource_id - ) + g.user = self.authorized_guest + has_guest_access = security_manager.has_guest_access(self.dash) self.assertTrue(has_guest_access) def test_has_guest_access__authorized_guest_user__non_zero_resource_index(self): - guest = self.authorized_guest() + # set up a user who has authorized access, plus another resource + guest = self.authorized_guest guest.resources = [ - {"type": "dashboard", "id": self.resource_id - 1} + {"type": "dashboard", "id": "not-a-real-id"} ] + guest.resources g.user = guest - has_guest_access = security_manager.has_guest_access( - GuestTokenResourceType.DASHBOARD, self.resource_id - ) + has_guest_access = security_manager.has_guest_access(self.dash) self.assertTrue(has_guest_access) def test_has_guest_access__unauthorized_guest_user__different_resource_id(self): g.user = security_manager.get_guest_user_from_token( { "user": {}, - "resources": [{"type": "dashboard", "id": self.resource_id - 1}], + "resources": [{"type": "dashboard", "id": "not-a-real-id"}], } ) - has_guest_access = security_manager.has_guest_access( - GuestTokenResourceType.DASHBOARD, self.resource_id - ) + has_guest_access = security_manager.has_guest_access(self.dash) self.assertFalse(has_guest_access) def test_has_guest_access__unauthorized_guest_user__different_resource_type(self): g.user = security_manager.get_guest_user_from_token( - {"user": {}, "resources": [{"type": "dirt", "id": self.resource_id}]} - ) - has_guest_access = security_manager.has_guest_access( - GuestTokenResourceType.DASHBOARD, self.resource_id + {"user": 
{}, "resources": [{"type": "dirt", "id": self.embedded.uuid}]} ) + has_guest_access = security_manager.has_guest_access(self.dash) self.assertFalse(has_guest_access) - def test_get_guest_user_roles_explicit(self): - guest = self.authorized_guest() - roles = security_manager.get_user_roles(guest) - self.assertEqual(guest.roles, roles) - - def test_get_guest_user_roles_implicit(self): - guest = self.authorized_guest() - g.user = guest - - roles = security_manager.get_user_roles() - self.assertEqual(guest.roles, roles) - - -@mock.patch.dict( - "superset.extensions.feature_flag_manager._feature_flags", - EMBEDDED_SUPERSET=True, -) -@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") -class TestGuestUserDashboardAccess(SupersetTestCase): - def setUp(self) -> None: - self.dash = db.session.query(Dashboard).filter_by(slug="births").first() - self.authorized_guest = security_manager.get_guest_user_from_token( - {"user": {}, "resources": [{"type": "dashboard", "id": self.dash.id}]} - ) - self.unauthorized_guest = security_manager.get_guest_user_from_token( - {"user": {}, "resources": [{"type": "dashboard", "id": self.dash.id + 1}]} - ) - def test_chart_raise_for_access_as_guest(self): chart = self.dash.slices[0] g.user = self.authorized_guest diff --git a/tests/integration_tests/security_tests.py b/tests/integration_tests/security_tests.py index d8f608ecfc2b3..0b3a8b1d82d88 100644 --- a/tests/integration_tests/security_tests.py +++ b/tests/integration_tests/security_tests.py @@ -888,7 +888,9 @@ def test_views_are_secured(self): ["AuthDBView", "login"], ["AuthDBView", "logout"], ["CurrentUserRestApi", "get_me"], + # TODO (embedded) remove Dashboard:embedded after uuids have been shipped ["Dashboard", "embedded"], + ["EmbeddedView", "embedded"], ["R", "index"], ["Superset", "log"], ["Superset", "theme"], diff --git a/tests/unit_tests/migrations/__init__.py b/tests/unit_tests/migrations/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- 
/dev/null +++ b/tests/unit_tests/migrations/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/unit_tests/migrations/shared/__init__.py b/tests/unit_tests/migrations/shared/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/unit_tests/migrations/shared/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
diff --git a/tests/unit_tests/migrations/shared/utils_test.py b/tests/unit_tests/migrations/shared/utils_test.py new file mode 100644 index 0000000000000..cb5b2cbd0e82b --- /dev/null +++ b/tests/unit_tests/migrations/shared/utils_test.py @@ -0,0 +1,56 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# pylint: disable=import-outside-toplevel, unused-argument + +""" +Test the SIP-68 migration. +""" + +from pytest_mock import MockerFixture + +from superset.sql_parse import Table + + +def test_extract_table_references(mocker: MockerFixture, app_context: None) -> None: + """ + Test the ``extract_table_references`` helper function. 
+ """ + from superset.migrations.shared.utils import extract_table_references + + assert extract_table_references("SELECT 1", "trino") == set() + assert extract_table_references("SELECT 1 FROM some_table", "trino") == { + Table(table="some_table", schema=None, catalog=None) + } + assert extract_table_references( + "SELECT 1 FROM some_catalog.some_schema.some_table", "trino" + ) == {Table(table="some_table", schema="some_schema", catalog="some_catalog")} + assert extract_table_references( + "SELECT * FROM some_table JOIN other_table ON some_table.id = other_table.id", + "trino", + ) == { + Table(table="some_table", schema=None, catalog=None), + Table(table="other_table", schema=None, catalog=None), + } + + # test falling back to sqlparse + logger = mocker.patch("superset.migrations.shared.utils.logger") + sql = "SELECT * FROM table UNION ALL SELECT * FROM other_table" + assert extract_table_references( + sql, + "trino", + ) == {Table(table="other_table", schema=None, catalog=None)} + logger.warning.assert_called_with("Unable to parse query with sqloxide: %s", sql)